Ejemplo n.º 1
1
class DB:
    """Thin TinyDB wrapper for video-track records keyed by display_id."""

    def __init__(self, db_path):
        self.db = TinyDB(db_path)

    def add(self, data):
        """Insert *data* only if no record with its display_id exists yet."""
        Track = Query()
        existing = self.db.get(Track.display_id == data['display_id'])
        if existing:
            return None
        return self.db.insert(data)

    def searchById(self, video_id):
        """Return the single record whose display_id equals *video_id*."""
        return self.db.get(Query().display_id == video_id)

    def search(self, text):
        """Case-insensitive regex search over title and description."""
        pattern = re.compile(text, re.IGNORECASE)
        matcher = pattern.search

        Track = Query()
        condition = Track.title.test(matcher) | Track.description.test(matcher)
        return self.db.search(condition)

    def all(self):
        """Return every record in the database."""
        return self.db.all()
Ejemplo n.º 2
0
class Test_007_Delete_Not_existing_data_by_valid_query_Function(unittest.TestCase):
    """Deleting a non-existent record with a valid query must be a no-op."""

    def setUp(self):
        # Fresh handle on the shared JSON-backed test database.
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Wipe all records so test cases stay independent.
        self.db.purge()
        self.db.all()

    def test_simple_delete_not_exist(self):
        print("case 7 Delete Non-existing data by valid query")
        outcome = self.db.remove(where('Name') == 'Wendy')
        self.assertIsNone(outcome)
Ejemplo n.º 3
0
class Test_006_Modify_Not_existing_data_by_valid_query_Function(unittest.TestCase):
    """Updating a record that does not exist must report no change."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Empty the database between cases.
        self.db.purge()
        self.db.all()

    def test_simple_modify_not_exist(self):
        print("case 6 modify Non-existing data by valid query")
        outcome = self.db.update({'int': 10}, where('Name') == 'Wendy')
        self.assertIsNone(outcome)
Ejemplo n.º 4
0
class Test_005_Search_Not_existing_data_by_valid_query_Function(unittest.TestCase):
    """Searching for a record that does not exist must yield an empty list."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Empty the database between cases.
        self.db.purge()
        self.db.all()

    def test_simple_search_not_exist(self):
        print("case 5 search Non-existing data by valid query")
        outcome = self.db.search(where('Name') == 'Wendy')
        self.assertEqual(outcome, [])
Ejemplo n.º 5
0
class Test_001_Insert_by_valid_query_Function(unittest.TestCase):
    """A freshly inserted record must be retrievable by name."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Empty the database between cases.
        self.db.purge()
        self.db.all()

    def test_simple_insert_valid_exist(self):
        print("case 1 insert data by valid query")
        record = {'Name': 'Greg', 'Email': '*****@*****.**', 'int' : 1, 'char':1}
        self.db.insert(record)
        found = self.db.search(where('Name') == 'Greg')
        self.assertEqual(found, [record])
Ejemplo n.º 6
0
def index():
    # Landing page: list all runs, optionally filtered by a free-text query.
    form = SearchForm()

    query = request.args.get('query', '').strip()

    db = TinyDB(recipyGui.config.get('tinydb'))

    if not query:
        runs = db.all()
    else:
        # Search run outputs using the query string
        runs = db.search(
            where('outputs').any(lambda x: listsearch(query, x)) |
            where('inputs').any(lambda x: listsearch(query, x)) |
            where('script').search(query) |
            where('notes').search(query) |
            where('unique_id').search(query))
    # Newest first; stored dates carry a '{TinyDate}:' prefix that must be
    # stripped before parsing.  Records without a date sort by their eid.
    runs = sorted(runs, key = lambda x: parse(x['date'].replace('{TinyDate}:', '')) if x['date'] is not None else x['eid'], reverse=True)

    run_ids = []
    for run in runs:
        if 'notes' in run.keys():
            # Escape user-entered notes so they render safely in the template.
            run['notes'] = str(escape(run['notes']))
        run_ids.append(run.eid)

    db.close()

    return render_template('list.html', runs=runs, query=query, form=form,
                           run_ids=str(run_ids),
                           dbfile=recipyGui.config.get('tinydb'))
Ejemplo n.º 7
0
def test_serialisation_of_pandas_dataframe(tmpdir):
    # Round-trip a DataFrame and a nested Series through a TinyDB file
    # using sacred's custom serializers registered on the storage layer.
    from sacred.observers.tinydb_hashfs import (DataFrameSerializer,
                                                SeriesSerializer)
    from tinydb_serialization import SerializationMiddleware

    import numpy as np
    import pandas as pd

    # Setup Serialisation object for non list/dict objects
    serialization_store = SerializationMiddleware()
    serialization_store.register_serializer(DataFrameSerializer(),
                                            'TinyDataFrame')
    serialization_store.register_serializer(SeriesSerializer(),
                                            'TinySeries')

    db = TinyDB(os.path.join(tmpdir.strpath, 'metadata.json'),
                storage=serialization_store)

    df = pd.DataFrame(np.eye(3), columns=list('ABC'))
    series = pd.Series(np.ones(5))

    document = {
        'foo': 'bar',
        'some_dataframe': df,
        'nested': {
            'ones': series
        }
    }

    db.insert(document)
    returned_doc = db.all()[0]

    # Element-wise comparisons: values must survive the round trip intact.
    assert returned_doc['foo'] == 'bar'
    assert (returned_doc['some_dataframe'] == df).all().all()
    assert (returned_doc['nested']['ones'] == series).all()
Ejemplo n.º 8
0
def test_serialisation_of_numpy_ndarray(tmpdir):
    # Round-trip numpy arrays (top-level and nested in a dict) through a
    # TinyDB file using sacred's NdArraySerializer.
    from sacred.observers.tinydb_hashfs import NdArraySerializer
    from tinydb_serialization import SerializationMiddleware
    import numpy as np

    # Setup Serialisation object for non list/dict objects
    serialization_store = SerializationMiddleware()
    serialization_store.register_serializer(NdArraySerializer(), 'TinyArray')

    db = TinyDB(os.path.join(tmpdir.strpath, 'metadata.json'),
                storage=serialization_store)

    eye_mat = np.eye(3)
    ones_array = np.ones(5)

    document = {
        'foo': 'bar',
        'some_array': eye_mat,
        'nested': {
            'ones': ones_array
        }
    }

    db.insert(document)
    returned_doc = db.all()[0]

    # Values must survive the round trip element-for-element.
    assert returned_doc['foo'] == 'bar'
    assert (returned_doc['some_array'] == eye_mat).all()
    assert (returned_doc['nested']['ones'] == ones_array).all()
Ejemplo n.º 9
0
class pcDB:
    """Minimal TinyDB wrapper bound to a named table."""

    def __init__(self, table="default"):
        # NOTE(review): path is empty here — presumably a real
        # '/path/to/db.json' must be configured before use. TODO confirm.
        path = ''
        self.table = table
        self.db = TinyDB(path).table(table)

    def insert(self, _dict):
        """Store one record (a plain dict) in the table."""
        self.db.insert(_dict)

    def getAll(self):
        """Return every record currently in the table (no parameters)."""
        return self.db.all()
#
# from tinydb.storages import JSONStorage
# from tinydb.middlewares import CachingMiddleware
# db = TinyDB('/path/to/db.json', storage=CachingMiddleware(JSONStorage))
Ejemplo n.º 10
0
    def test_write_first_cache(self):
        """test write behavior on first pass (cache-buster mode)"""
        self.test_clear_existing_cache()    #blowup existing cache again

        dummy_data = forecast_utils.parse_emd_data(DEMO_DATA['result'])

        forecast_utils.write_prediction_cache(
            self.region_id,
            self.type_id,
            dummy_data,
            cache_path=self.cache_path
        )

        # Writing the cache must create the file on disk.
        assert path.isfile(self.cache_filepath)

        tdb = TinyDB(self.cache_filepath)

        data = tdb.all()[0]

        # The cached record must carry exactly these keys — no more, no less.
        keys_list = [
            'cache_date',
            'region_id',
            'type_id',
            'lastWrite',
            'prediction'
        ]
        assert set(keys_list) == set(data.keys())
        dummy_str_data = dummy_data.to_json(
            date_format='iso',
            orient='records'
        )
        # Parsing proves the stored JSON is well-formed; the parsed value
        # itself is unused (the string comparison below is the real check).
        cached_data = pd.read_json(data['prediction'])

        assert data['prediction'] == dummy_str_data
        tdb.close()
Ejemplo n.º 11
0
def update_statistic():
    # Rebuild data/statistic.txt: problems grouped by level (1-3),
    # newest rounds first, each level sorted by difficulty descending.
    # NOTE: Python 2 source (print statement at the end).
    db = TinyDB("data/db.json")
    rows = db.all()
    rows.sort(key=lambda x: int(x['rid']), reverse=True)
    levels = {1: [], 2: [], 3: []}
    for row in rows:
        levels[row['level']].append(row)

    out = []
    # Fixed-width column header for the plain-text table.
    header = '|%-35s|%-40s|%-6s|%-10s|%5s|%10s|' % ("round", "problem", "solved", "ave_pts",
            "rate", "submission")
    for k, v in levels.items():
        out.append('# LEVEL %d' % k)
        out.append('-' * len(header))
        out.append(header)
        out.append('-' * len(header))
        v.sort(key=lambda x: difficulty(x), reverse=True)
        for i in v:
            out.append('|%-35s|%-40s|%-6s|%-10.2lf|%-4.3lf|%10s|' % (i['round'], i['name'], i['solved'],
                i['average_pts'], i['correct_rate'], i['submissions']))
        out.append('*' * len(header))
    with open("data/statistic.txt", 'w') as f:
        for i in out:
            f.write(i + '\n')
    print '>>>> data/statistic.txt has been updated.'
Ejemplo n.º 12
0
def get_coffees(active):
  # Build the navigation link list for all coffees plus the details
  # (name, link, description, id) of the selected one.  active == -1
  # means "no explicit selection": the first coffee becomes active.
  # NOTE: Python 2 source (print statements near the end).
  name = ''
  name_link = ''
  nav_links = []
  description = ''
  db = TinyDB('db/coffee.json')
  coffee = db.all()
  count = 0
  coffee_id = 0
  for i in coffee:
    if active == -1 and count == 0:
      # Default selection: first coffee in the list is marked active.
      nav_links.append(("/"+str(i['id']),"active",i['name']))
      name = i['name']
      description = i['description']
      coffee_id = i['id']
      name_link = '/'+str(i['id'])
    elif active == -1 and count > 0:
      nav_links.append(("/"+str(i['id']),"",i['name']))
    elif active == i['id']:
      # Explicitly selected coffee.
      nav_links.append(("/"+str(i['id']),"active",i['name']))
      name = i['name']
      description = i['description']
      coffee_id = i['id']
      name_link = '/'+str(i['id'])
    else:
      nav_links.append(("/"+str(i['id']),"",i['name']))
    count = count+1
  # Debug output only.
  for i in nav_links:
    print i
  print name
  print name_link

  return nav_links, name, name_link, description, coffee_id
Ejemplo n.º 13
0
class NumberStore():
    """TinyDB-backed store of number records with info and access metadata."""

    def __init__(self, filename):
        self.db = TinyDB(filename)

    def initNumber(self, number):
        # Create the record lazily on first use.
        if not self.getNumberDict(number):
            self.db.insert({'number': number, 'accesses': [], 'info': '#yolo'})

    def touchNumber(self, number):
        # Access logging is not implemented yet; just ensure the row exists.
        self.initNumber(number)

    def getNumberDict(self, number):
        """Return the record for *number*, or None if absent."""
        return self.db.get(where('number') == number)

    def getNumberList(self):
        """Yield every stored number (generator)."""
        return (rec['number'] for rec in self.db.all())

    def getAccesses(self, number):
        # Access history is not tracked yet, so every number reports none.
        return []

    def getInfo(self, number):
        """Return the 'info' field of the record for *number*."""
        record = self.getNumberDict(number)
        return record['info']

    def setInfo(self, number, info):
        """Set the 'info' field, creating the record if needed."""
        self.initNumber(number)
        self.db.update({'info': info}, where('number') == number)

        print(self.db.all())

    def deleteNumber(self, number):
        """Remove the record for *number*."""
        self.db.remove(where('number') == number)

        print(self.db.all())

    def close(self):
        self.db.close()
Ejemplo n.º 14
0
def test_gc(tmpdir):
    """Regression check for tinydb garbage-collection issue #92."""
    # See https://github.com/msiemens/tinydb/issues/92
    db_path = str(tmpdir.join('db.json'))
    foo_table = TinyDB(db_path).table('foo')
    for record in ({'something': 'else'}, {'int': 13}):
        foo_table.insert(record)
    assert len(foo_table.search(where('int') == 13)) == 1
    assert foo_table.all() == [{'something': 'else'}, {'int': 13}]
Ejemplo n.º 15
0
def get_genres(database_name):
    """Utility method to get all the genres as a set"""
    db = TinyDB(os.path.join(os.getcwd(), database_name))
    # Collect the raw genre strings, then split slash-separated entries
    # like "Rock/Pop" into individual genres.
    raw_genres = {song['genre'] for song in db.all()}
    split_genres = set()
    for entry in raw_genres:
        split_genres |= set(entry.strip().split('/'))
    db.close()
    return _strip_spaces(split_genres)
Ejemplo n.º 16
0
class Test_008_Insert_exits_data_Function(unittest.TestCase):
    """TinyDB permits duplicates: inserting twice must yield two matches."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Empty the database between cases.
        self.db.purge()
        self.db.all()

    def test_simple_insert_by_query(self):
        print("case 8 can insert existing data")
        record = {'Name': 'Yingyu Wu', 'Email': '*****@*****.**', 'int' : 1, 'char':1}
        self.db.insert(dict(record))
        self.db.insert(dict(record))
        matches = self.db.search(where('Name') == 'Yingyu Wu')
        self.assertEqual(2, len(matches))
Ejemplo n.º 17
0
class FolderManager:
    """Tracks user-added folder names in a TinyDB file under the config dir."""

    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, FOLDER_DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        # Re-open after close_db(); points at the same file.
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def add_folder(self, file_name):
        # De-duplicate: only record names not already present.
        if not self.folder_exists(file_name):
            self._db.insert({'file_name': file_name})

    def get_all_entries(self):
        """Return every raw record in the database."""
        return self._db.all()

    def folder_exists(self, file_name):
        """ checks if a folder has been added """
        matches = self._db.search(where('file_name') == file_name)
        return True if matches else False

    def remove_element(self, file_name):
        self._db.remove(where('file_name') == file_name)

    def get_file_names(self):
        """ returns all the file names of folders that the user has added """
        return [entry['file_name'] for entry in self._db.all()]

    def get_folder_by_name(self, expected_name):
        """ get documents by the specified property """
        return self._db.get(where('file_name') == expected_name)

    def clear_all(self):
        """Remove every record."""
        self._db.purge()
Ejemplo n.º 18
0
def sync_existing(dry_run=True, verbose=True):
    """Manually sync files by comparing modification time with last known.
    """
    home = os.path.expanduser('~/')
    truncate = len(home)
    os.chdir(home)
    db = TinyDB('.sdpp-sync.json')
    for record in db.all():
        sony = record['box']
        paperpile = record['paperpile']
        modtime = record['modtime']
        modified_action(record, sony, paperpile, modtime,
                        dry_run=dry_run, verbose=verbose)
Ejemplo n.º 19
0
def update_similarity_matrix(newDOI):
    if(os.path.isfile(sim_matrix)):    
        db = TinyDB(db_loc)
        paper = Query()
        this_paper_dict = db.search(paper.ownDOI == newDOI)[0] #returns entry as dictionary
        name = this_paper_dict['filename']
        vector_path = vector_dir + name+'.pkl'
        vec_file = open(vector_path, 'rb')
        newVector = pickle.load(vec_file)
        #now load the matrix
        mat_file = open(sim_matrix,'rb')
        mat_list = pickle.load(mat_file)
        index_dict = mat_list[0];
        mat = mat_list[1];
        if newDOI in index_dict:
            return False
        
        num_ent =   np.size(mat,axis = 0)
##        if sizemat == 1:
##            num_ent = 1
##        else:
##            num_ent = sizemat[0]
            
        index_dict[newDOI] = num_ent

        #extend the matrix to appropriate size
        mat = np.vstack((mat,np.zeros((1,num_ent))))
        mat = np.hstack((mat,np.zeros((num_ent+1,1))))
        print mat
        
        #get all the entries of the db
        all_DOIs = db.all()
        for line in all_DOIs:
            print vector_path
            name = line['filename']
            vector_path = vector_dir+name+'.pkl'
            print vector_path
            vec_file = open(vector_path,'rb')
            vec = pickle.load(vec_file)
            closeness = vector_mult_and_add(newVector,vec)
            newInd = index_dict[newDOI]
            oldInd = index_dict[line['ownDOI']]
            mat[newInd][oldInd] = closeness;
            mat[oldInd][newInd] = closeness;

        output_mat_list = [index_dict,mat]
        output = open(sim_matrix,'wb')
        pickle.dump(output_mat_list,output)   
        return None
    else:
        init_similarity_matrix(newDOI)
        return None
Ejemplo n.º 20
0
def latest_run():
    # Details page for the most recent run.
    form = SearchForm()
    annotateRunForm = AnnotateRunForm()

    db = TinyDB(recipyGui.config.get('tinydb'))

    runs = db.all()
    # Newest first; stored dates carry a '{TinyDate}:' prefix to strip
    # before parsing.
    runs = sorted(runs, key = lambda x: parse(x['date'].replace('{TinyDate}:', '')), reverse=True)

    db.close()

    return render_template('details.html', query='', form=form, run=runs[0],
                           annotateRunForm=annotateRunForm,
                           active_page='latest_run')
Ejemplo n.º 21
0
class ListCache(object):
    # TinyDB-backed cache of listings, keyed by listing hash ('hsh'),
    # with a 12-hour time-to-live.

    DB_FILE = 'listing_db.json'
    DB_TTL = timedelta(hours=12)

    def __init__(self):
        # Store the DB file next to this module (not the cwd).
        self.db = TinyDB(os.path.join(os.path.join(os.getcwd(), os.path.dirname(__file__)), ListCache.DB_FILE))

    @property
    def db(self):
        return self._db

    @db.setter
    def db(self, db):
        self._db = db

    def listing_in_cache(self, listing):
        """Return True if a listing with the same hash is cached."""
        lquery = Query()
        return self.db.contains(lquery.hsh == listing.hsh)

    def retrieve_listing(self, listing):
        """Fetch the cached copy of *listing*, matched by hash."""
        lquery = Query()
        list_dict = self.db.get(lquery.hsh == listing.hsh)
        return Listing.from_dict(list_dict)

    def insert_listing(self, listing):
        """Insert a listing; refreshes the record if already cached."""
        if self.listing_in_cache(listing):
            self.update_listing(listing)
        else:
            list_dict = listing.as_dict()
            # Timestamp lets remove_old_listings expire this entry later.
            list_dict['last_updated'] = datetime.now().isoformat()
            list_dict['hsh'] = listing.hsh
            self.db.insert(list_dict)

    def remove_listing(self, listing):
        lquery = Query()
        self.db.remove(lquery.hsh == listing.hsh)

    def update_listing(self, listing):
        """Replace the cached copy with the given listing."""
        lquery = Query()
        if self.listing_in_cache(listing):
            self.remove_listing(listing)
        self.insert_listing(listing)

    def remove_old_listings(self):
        """Drop every cached listing older than DB_TTL."""
        list_ar = self.db.all()
        for listing in list_ar:
            if datetime.strptime(listing['last_updated'], '%Y-%m-%dT%H:%M:%S.%f') < datetime.now() - ListCache.DB_TTL:
                self.remove_listing(Listing.from_dict(listing))
Ejemplo n.º 22
0
class MapController(BaseController):
    """Serves map markers built from the stored address records."""

    def __init__(self):
        self.addresses = TinyDB(self.databaseDirectory + "/addresses.json")

    def get(self):
        """Return coordinates for every address with both components set."""
        markers = []
        for entry in self.addresses.all():
            cords = entry["cords"]
            # Skip addresses where either coordinate is missing/falsy.
            if cords[0] and cords[1]:
                markers.append(cords)
        return markers
Ejemplo n.º 23
0
    def test_empty_cache(self):
        """test un-cached behavior"""
        # A cache miss must return None...
        data = forecast_utils.check_prediction_cache(
            self.region_id,
            self.type_id,
            cache_path=self.cache_path
        )
        assert data is None

        # ...while still creating the cache file on disk.
        assert path.isfile(self.cache_filepath)

        tdb = TinyDB(self.cache_filepath)

        # The freshly created cache must contain no records.
        assert tdb.all() == []

        tdb.close()
Ejemplo n.º 24
0
 def test_db_purge_when_all_posted(self):
     """100 available images vs 105 posted: expect 100 unused in order."""
     available_files = list()
     for index in range(1,101):
         image_name = 'image{0}.png'.format(index)
         available_files.append(image_name)
     # In-memory db holding 105 posted images (5 more than available).
     db = TinyDB(storage=storages.MemoryStorage)
     for id in range(1,106):
         image_name = 'image{0}.png'.format(id)
         db.insert({'image': image_name})
     self.assertEqual(len(db.all()), 105)
     unused_files = utils.get_unused_files(db, available_files)
     self.assertEqual(len(unused_files), 100)
     # Spot-check that results follow the available_files ordering.
     self.assertEqual(unused_files[0], 'image1.png')
     self.assertEqual(unused_files[5], 'image6.png')
     self.assertEqual(unused_files[10], 'image11.png')
     self.assertEqual(unused_files[33], 'image34.png')
     self.assertEqual(unused_files[50], 'image51.png')
Ejemplo n.º 25
0
 def test_tweet_insert_into_db(self):
     """Posting one tweet via Manager should store exactly one db record."""
     config_parser = configparser.ConfigParser()
     # Preserve key case (optionxform lowercases by default).
     config_parser.optionxform = str
     config_parser.read('tests/goldfinchsong.ini')
     active_configuration = parse_configuration(config_parser)
     credentials = active_configuration['credentials']
     text_conversions = active_configuration['text_conversions']
     try:
         db = TinyDB(active_configuration['db_location'])
         manager = Manager(credentials, db, active_configuration['image_directory'], text_conversions)
         # Replace the real API client so no network call is made.
         manager.api = MockManagerAPI()
         manager.post_tweet()
         tweets = db.all()
         self.assertEqual(len(tweets), 1)
     finally:
         # Always remove the on-disk db file created for this test.
         if os.path.isfile(active_configuration['db_location']):
             os.remove(active_configuration['db_location'])
Ejemplo n.º 26
0
def latest_run():
    # Details page for the most recent run, including its file diffs.
    form = SearchForm()
    annotateRunForm = AnnotateRunForm()

    db = TinyDB(recipyGui.config.get('tinydb'))

    runs = db.all()
    # Newest first; stored dates carry a '{TinyDate}:' prefix to strip.
    runs = sorted(runs, key = lambda x: parse(x['date'].replace('{TinyDate}:', '')), reverse=True)
    r = db.get(eid=runs[0].eid)
    # File diffs recorded for this run live in the 'filediffs' table.
    diffs = db.table('filediffs').search(Query().run_id == r.eid)

    db.close()

    return render_template('details.html', query='', form=form, run=r,
                           annotateRunForm=annotateRunForm,
                           dbfile=recipyGui.config.get('tinydb'), diffs=diffs,
                           active_page='latest_run')
Ejemplo n.º 27
0
 def test_storage_in_db(self):
     """load_content should choose an on-disk image not already in the db."""
     image_directory = 'tests/images/'
     # let's load a list of tweets into the db
     db = TinyDB(storage=storages.MemoryStorage)
     image_names = [
         'goldfinch1.jpg',
         'goldfinch2.jpg',
         'goldfinch3.jpg',
         'goldfinch4.jpg'
     ]
     for image_name in image_names:
         delivery_timestamp = datetime.now(tz=timezone.utc).isoformat()
         tweet = {'image': image_name, 'delivered_on': delivery_timestamp}
         db.insert(tweet)
     content = utils.load_content(db, image_directory)
     # The undelivered image is expected at index 2 of the result.
     self.assertEqual(content[2], 'goldfinch5.jpg')
     # Loading content must not add records to the db.
     tweets = db.all()
     self.assertEqual(len(tweets), 4, msg=tweets)
Ejemplo n.º 28
0
class BotHelper:
    """Telegram bot helper: manages trigger/response records in TinyDB."""

    def __init__(self, bot):
        self.db = TinyDB('db.json')
        self.bot = bot

    def executeAssignment(self, chat_id, assignment):
        """Handle an 'add', 'del' or 'list' admin command."""
        if assignment['command'] == 'add':
            entries = Query()
            if self.db.search(entries.trigger == assignment['trigger']):
                self.bot.sendMessage(chat_id=chat_id, text='No duplicates allowed for now')
                return False
            if assignment['response_type'] == 'image' or assignment['response_type'] == 'gif':
                # Download the media locally and store the local path instead.
                filename = assignment['response'].split('/')[-1]
                extention = filename.split('.')[-1]
                local_path = 'media/images/' + assignment['trigger'] + '.' + extention
                urllib.request.urlretrieve(assignment['response'], local_path)
                assignment['response'] = local_path
            self.db.insert({
                'trigger': assignment['trigger'],
                'response_type': assignment['response_type'],
                'response': assignment['response'],
            })
        if assignment['command'] == 'del':
            self.db.remove(where('trigger') == assignment['trigger'])
        if assignment['command'] == 'list':
            for entry in self.db.all():
                self.bot.sendMessage(chat_id=chat_id, text=entry)

    def executeTrigger(self, chat_id, message):
        """Send the stored response for any trigger word in *message*."""
        entries = Query()
        for word in message.split(' '):
            entry = self.db.search(entries.trigger == word)
            if not entry:
                continue
            record = entry[0]
            if record['response_type'] == 'text':
                self.bot.sendMessage(chat_id=chat_id, text=record['response'])
            if record['response_type'] == 'image':
                img = open(record['response'], 'rb')
                self.bot.sendPhoto(chat_id=chat_id, photo=img)
            if record['response_type'] == 'gif':
                img = open(record['response'], 'rb')
                self.bot.sendDocument(chat_id=chat_id, document=img)
Ejemplo n.º 29
0
class DataStore(object):
    """Tracks file paths in a TinyDB metadata store under <store_path>/META."""

    def __init__(self, store_path):
        self.store_path = os.path.join(store_path, "META")
        try:
            os.makedirs(self.store_path)
        except OSError as exc:
            # Ignore "already exists"; re-raise any other failure.
            if exc.errno != errno.EEXIST:
                raise

        self.db = TinyDB(os.path.join(self.store_path, "__meta__.json"))

    def add_file(self, file_path):
        """Record *file_path* (normalised to internal form)."""
        self._add_to_db(self._get_internal_path(file_path))

    def remove_file(self, file_path):
        """Forget *file_path* (normalised to internal form)."""
        self._remove_from_db(self._get_internal_path(file_path))

    def list_all(self):
        """Return every stored file path."""
        return self._list_all_db()

    def _init_file_list(self):
        # NOTE(review): appends the file object rather than the line;
        # behaviour preserved as-is since this helper appears unused here.
        with open(self.store_path, "r") as f:
            for line in tqdm(f):
                self.file_list.append(f)

    def _init_db(self):
        self.db = TinyDB(self.store_path)

    def _add_to_db(self, file_path):
        # De-duplicate on the stored path.
        if not self.db.contains(where('file_path') == file_path):
            self.db.insert({'file_path': file_path})

    def _remove_from_db(self, file_path):
        self.db.remove(where('file_path') == file_path)

    def _list_all_db(self):
        return [rec['file_path'] for rec in self.db.all()]

    def _get_internal_path(self, path):
        return get_internal_path(path)
Ejemplo n.º 30
0
 def get_db_list(self, PATH=None):
     """Return a summary record for every entry in the storage-index db.

     :param PATH: unused; kept for backward compatibility.
     :return: list of dicts with name/map/id/address metadata.
     """
     db = TinyDB(os.environ[config.STORAGE_INDEX_DB])
     # FIX: the local result was previously named ``list``, shadowing the
     # builtin; renamed and rewritten as a comprehension.
     records = db.all()
     return [
         {
             'name': rec['data']['name'],
             'map': rec['data']['map'],
             'selfid': rec['id'],
             'id': rec['data']['id'],
             'addresses': rec['data']['url'],
             'type': 'url',
             'data': rec['data']['data'],
         }
         for rec in records
     ]
Ejemplo n.º 31
0
    # Fragment of a larger Python 2 script (enclosing scope not visible):
    # show the selected access point, ask whether to store it, then resume
    # scanning.  ``S``, ``db``, ``scanning`` come from the enclosing scope.
    ap = S[input_var - 1]
    print ap["ssid"]
    print ap["address"]
    print ap["encrypted"]
    print ap["channel"]
    print "---------------------------------------------"

    # Prompt until the user enters 1 or 2.
    loop = True
    while loop:
        try:
            input_var = int(
                input("1: Store Valid AP \n2: Disregard and Continue\n:"))
            if input_var > 0 and input_var <= 2:
                loop = False
        except ValueError:
            pass

    if input_var == 1:
        # Store only when this (ssid, address) pair is not already recorded.
        if db.search((where('ssid') == ap["ssid"])
                     & (where('address') == str(ap["address"]))) == []:
            db.insert(ap)
        else:
            print "already Stored in the database"
        '''
        print all database
        '''
        print db.all()

    # Resume scanning on the wlan4 interface.
    scan = scanning("wlan4", db)
    scan.scan()
Ejemplo n.º 32
0
class atxserver2(object):
    """
    According to users requirements to select devices
    """
    def __init__(self, url):
        """
        Construct method

        :param url: atx-server2 address, 'ip:port' or 'http://ip:port'
        """
        self._db = TinyDB(storage=MemoryStorage)
        if url and re.match(r"(http://)?(\d+\.\d+\.\d+\.\d+:\d+)", url):
            if '://' not in url:
                url = 'http://' + url
            self._url = url
            # FIX: load() was previously called both here and again
            # unconditionally after the if/else, inserting every device
            # record into the in-memory db twice -- and the trailing call
            # crashed on a missing self._url when the address was invalid.
            # It now runs exactly once, only for a valid URL.
            self.load()
        else:
            logger.error('Atx server addr error')

    def load(self, **kwargs):
        """
        Use the data which got from stf platform to crate query db

        :return: the len of records in the db's table
        """
        kwargs['headers'] = {"Authorization": "Bearer " + token}
        res = requests.get(self._url + '/api/v1/devices', **kwargs).json()
        if res is not None:
            eids = self._db.insert_multiple(res['devices'])
            return len(eids)
        else:
            return 0

    def find(self, cond=None):
        """
        According condition to filter devices and return
        :param cond: condition to filter devices
        :type cond: where
        :return: stf_selector object and its db contains devices
        """
        if cond is not None:
            # Keep only matching records: search, clear, re-insert.
            res = self._db.search(cond)
            self.purge()
            self._db.insert_multiple(res)
        return self

    def devices(self):
        """
        return all devices that meeting the requirement
        :return: list of devices
        """
        return self._db.all()

    def refresh(self):
        """
        reload the devices info from stf
        :return: the len of records in the db's table
        """
        self.purge()
        return self.load()

    def count(self):
        """
        count the records in the db's table
        :return: the len of records in the db's table
        """
        return len(self._db.all())

    def purge(self):
        """
        remove all the data from the db
        :return:
        """
        self._db.purge()

    def online_devices(self):
        """Find devices that are currently present (online)."""
        self.refresh()
        devices = self.find(where('present') == True).devices()
        if len(devices) > 0:
            return devices
        else:
            return False

    def present_ios_devices(self, **kwargs):
        """Return full device info for every present Apple device."""
        kwargs['headers'] = {"Authorization": "Bearer " + token}
        self.refresh()
        # Narrow to Apple devices, then to those currently present.
        self.find(where('platform') == 'apple').devices()
        devices = self.find(where('present') == True).devices()
        if len(devices) > 0:
            return [
                requests.get(
                    self._url + '/api/v1/user/devices/' + device['udid'],
                    **kwargs).json()['device'] for device in devices
            ]
        else:
            return False

    def present_android_devices(self, **kwargs):
        """Return full device info for every present Android device."""
        kwargs['headers'] = {"Authorization": "Bearer " + token}
        self.refresh()
        # Narrow to Android devices, then to those currently present.
        self.find(where('platform') == 'android').devices()
        devices = self.find(where('present') == True).devices()
        if len(devices) > 0:
            return [
                requests.get(
                    self._url + '/api/v1/user/devices/' + device['udid'],
                    **kwargs).json()['device'] for device in devices
            ]
        else:
            return False

    def present_udid_devices(self, **kwargs):
        """Return full device info for each configured udid that is present."""
        kwargs['headers'] = {"Authorization": "Bearer " + token}
        present_udid_devices_list = []
        for udid in ReadConfig().get_server_udid():
            self.refresh()
            self.find(where('udid') == udid).devices()
            device = self.find(where('present') == True).devices()
            if device:
                present_udid_devices_list.append(
                    requests.get(self._url + '/api/v1/user/devices/' + udid,
                                 **kwargs).json()['device'])
            else:
                pass
        if len(present_udid_devices_list) > 0:
            return present_udid_devices_list
        else:
            return False

    def using_device(self, udid, **kwargs):
        """Claim a device for use (2-hour idle timeout)."""
        kwargs['headers'] = {"Authorization": "Bearer " + token}
        ret = requests.post(self._url + '/api/v1/user/devices',
                            json={
                                "udid": udid,
                                "idleTimeout": 7200
                            },
                            **kwargs)
        if ret.status_code == 200:
            print(ret.json())
            return True
        else:
            return False

    def release_device(self, udid, **kwargs):
        """Release a previously claimed device."""
        kwargs['headers'] = {"Authorization": "Bearer " + token}
        ret = requests.delete(self._url + '/api/v1/user/devices/' + udid,
                              **kwargs)
        if ret.status_code == 200:
            print(ret.json())
            return True
        else:
            return False
Ejemplo n.º 33
0
def get_unregistered_tip():
    """Return all records from the unregistered-tip user database.

    FIX: the handle is now closed in a ``finally`` block so the file is
    released even if ``db.all()`` raises (the original skipped close()
    on error).
    """
    db = TinyDB(config.unregistered_tip_user)
    try:
        return db.all()
    finally:
        db.close()
Ejemplo n.º 34
0
class TinyDBCatalogueProvider(BaseProvider):
    """TinyDB Catalogue Provider

    Serves catalogue (metadata record) queries from a TinyDB JSON
    document store.  Records are GeoJSON-like features; the internal
    full-text field '_metadata-anytext' is always stripped from
    responses (see ``self.excludes``).
    """
    def __init__(self, provider_def):
        """
        Initialize object

        :param provider_def: provider definition

        :returns: pygeoapi.provider.tinydb_.TinyDBCatalogueProvider
        """

        # Properties never exposed to clients.
        self.excludes = [
            '_metadata-anytext',
        ]

        super().__init__(provider_def)

        LOGGER.debug('Connecting to TinyDB db at {}'.format(self.data))

        if not os.path.exists(self.data):
            msg = 'TinyDB does not exist'
            LOGGER.error(msg)
            raise ProviderConnectionError(msg)

        self.db = TinyDB(self.data)

        self.fields = self.get_fields()

    def get_fields(self):
        """
         Get provider field information (names, types)

        :returns: dict of fields
        """

        fields = {}

        # Field names are derived from the first record only; an empty
        # db yields an empty field map.
        try:
            r = self.db.all()[0]
        except IndexError as err:
            LOGGER.debug(err)
            return fields

        for p in r['properties'].keys():
            if p not in self.excludes + ['extent']:
                fields[p] = {'type': 'string'}

        # 'q' is the synthetic full-text search field.
        fields['q'] = {'type': 'string'}

        return fields

    def query(self,
              offset=0,
              limit=10,
              resulttype='results',
              bbox=[],
              datetime_=None,
              properties=[],
              sortby=[],
              select_properties=[],
              skip_geometry=False,
              q=None,
              **kwargs):
        """
        query TinyDB document store

        :param offset: starting record to return (default 0)
        :param limit: number of records to return (default 10)
        :param resulttype: return results or hit limit (default results)
        :param bbox: bounding box [minx,miny,maxx,maxy]
        :param datetime_: temporal (datestamp or extent)
        :param properties: list of tuples (name, value)
        :param sortby: list of dicts (property, order)
        :param select_properties: list of property names
        :param skip_geometry: bool of whether to skip geometry (default False)
        :param q: full-text search term(s)

        :returns: dict of 0..n GeoJSON feature collection
        """

        Q = Query()
        LOGGER.debug('Query initiated: {}'.format(Q))

        # Predicates are collected as Python *source strings*, joined
        # with '&' and evaluated further down (see eval below).
        QUERY = []

        feature_collection = {'type': 'FeatureCollection', 'features': []}

        if resulttype == 'hits':
            LOGGER.debug('hits only specified')
            limit = 0

        if bbox:
            LOGGER.debug('processing bbox parameter')
            bbox_as_string = ','.join(str(s) for s in bbox)
            QUERY.append(
                "Q.properties.extent.spatial.bbox.test(bbox_intersects, '{}')".
                format(bbox_as_string))  # noqa

        if datetime_ is not None:
            LOGGER.debug('processing datetime parameter')
            if self.time_field is None:
                LOGGER.error('time_field not enabled for collection')
                raise ProviderQueryError()

            if '/' in datetime_:  # envelope
                LOGGER.debug('detected time range')
                time_begin, time_end = datetime_.split('/')

                # '..' denotes an open-ended bound and adds no predicate.
                if time_begin != '..':
                    QUERY.append(
                        "(Q.properties[self.time_field]>='{}')".format(
                            time_begin))  # noqa
                if time_end != '..':
                    QUERY.append(
                        "(Q.properties[self.time_field]<='{}')".format(
                            time_end))  # noqa

            else:  # time instant
                LOGGER.debug('detected time instant')
                QUERY.append("(Q.properties[self.time_field]=='{}')".format(
                    datetime_))  # noqa

        if properties:
            LOGGER.debug('processing properties')
            for prop in properties:
                QUERY.append("(Q.properties['{}']=='{}')".format(*prop))

        if q is not None:
            # Each whitespace-separated term is matched (AND) against the
            # internal full-text field.
            for t in q.split():
                QUERY.append(
                    "(Q.properties['_metadata-anytext'].search('{}', flags=re.IGNORECASE))"
                    .format(t))  # noqa

        QUERY_STRING = '&'.join(QUERY)
        LOGGER.debug('QUERY_STRING: {}'.format(QUERY_STRING))
        SEARCH_STRING = 'self.db.search({})'.format(QUERY_STRING)
        LOGGER.debug('SEARCH_STRING: {}'.format(SEARCH_STRING))

        LOGGER.debug('querying database')
        if len(QUERY) > 0:
            # NOTE(review): user-supplied values (bbox, datetime,
            # properties, q terms) are interpolated into source code and
            # run through eval() — a code-injection risk if any of these
            # inputs is attacker-controlled.
            LOGGER.debug('running eval on {}'.format(SEARCH_STRING))
            results = eval(SEARCH_STRING)
        else:
            results = self.db.all()

        feature_collection['numberMatched'] = len(results)

        if resulttype == 'hits':
            return feature_collection

        # Strip internal fields in place from every matched record.
        for r in results:
            for e in self.excludes:
                del r['properties'][e]

        len_results = len(results)

        LOGGER.debug('Results found: {}'.format(len_results))

        if len_results > limit:
            returned = limit
        else:
            returned = len_results

        feature_collection['numberReturned'] = returned

        if sortby:
            LOGGER.debug('Sorting results')
            # A '-' order marker means descending; only the first sort
            # criterion is applied.
            if sortby[0]['order'] == '-':
                sort_reverse = True
            else:
                sort_reverse = False

            results.sort(key=lambda k: k['properties'][sortby[0]['property']],
                         reverse=sort_reverse)

        feature_collection['features'] = results[offset:offset + limit]

        return feature_collection

    def get(self, identifier, **kwargs):
        """
        Get TinyDB document by id

        :param identifier: record id

        :returns: `dict` of single record

        :raises ProviderItemNotFoundError: when no record matches
        """

        LOGGER.debug('Fetching identifier {}'.format(identifier))

        record = self.db.get(Query().id == identifier)

        if record is None:
            raise ProviderItemNotFoundError('record does not exist')

        # Strip internal fields before returning the record.
        for e in self.excludes:
            del record['properties'][e]

        return record

    def _bbox(input_bbox, record_bbox):
        """
        Test whether one bbox intersects another

        :param input_bbox: `list` of minx,miny,maxx,maxy
        :param record_bbox: `list` of minx,miny,maxx,maxy

        :returns: `bool` of result

        NOTE(review): defined without ``self``/``@staticmethod`` — only
        callable unbound; confirm the intended binding.  Currently a
        stub that reports every pair of bboxes as intersecting.
        """

        return True

    def __repr__(self):
        return '<TinyDBCatalogueProvider> {}'.format(self.data)
Ejemplo n.º 35
0
# We use a TinyDB as a database for our games
from tinydb import TinyDB, Query

# We create a flask app
app = flask.Flask(__name__)
# This allows cross-origin requests (CORS), but it probably makes the app insecure
CORS(app)
# We use debug mode for now
app.config["DEBUG"] = True

# We open our TinyDB. Note that the file is not yet in the github repository.
# There is a Jupyter notebook which builds the database.
game_db = TinyDB('spiele_tinydb.json')

# Take the first four records as test data for our catalog
# (a list of dictionaries).
games = game_db.all()[0:4]


# Static landing page shown when no API path is requested.
@app.route('/', methods=['GET'])
def home():
    """Return a fixed HTML blurb describing the API."""
    page = '''<h1>My Game DB</h1>
<p>A prototype API for querying my collection of computer games.</p>'''
    return page


# End-to-end prototype endpoint: dump the preloaded sample games.


@app.route('/api/v1/resources/games/all', methods=['GET'])
def api_all():
    """Return the preloaded sample games as a JSON response."""
    payload = jsonify(games)
    return payload
Ejemplo n.º 36
0
def normalize_images(file_index_fn,
                     table_name="hdf5_proc",
                     date=None,
                     sample=None,
                     energy=None,
                     average_ff=True,
                     cores=-2,
                     query=None,
                     jj=False,
                     read_norm_ff=False):
    """Normalize images of one experiment.
    If date, sample and/or energy are indicated, only the corresponding
    images for the given date, sample and/or energy are normalized.
    The normalization of different images will be done in parallel. Each
    file, contains a single image to be normalized.

    :param file_index_fn: TinyDB index file of the experiment
    :param table_name: table inside the index db (None uses the default)
    :param date: restrict processing to this acquisition date
    :param sample: restrict processing to this sample name
    :param energy: restrict processing to this energy
    :param average_ff: normalize against a single averaged FF image
    :param cores: joblib n_jobs (-2 = all cores but one)
    :param query: extra TinyDB query AND-ed onto the record selection
    :param jj: also group records by the jj_u/jj_d slit positions
    :param read_norm_ff: read a precomputed normalized FF instead of
        computing it from the first image

    .. todo: This method should be divided in two. One should calculate
     the average FF, and the other (normalize_images), should receive
     as input argument, the averaged FF image (or the single FF image).
    """

    start_time = time.time()
    file_index_db = TinyDB(file_index_fn,
                           storage=CachingMiddleware(JSONStorage))
    # Keep the root handle so the cache can be flushed/closed at the end.
    db = file_index_db
    if table_name is not None:
        file_index_db = file_index_db.table(table_name)

    #print(file_index_db.all())

    files_query = Query()
    if date or sample or energy:
        # Narrow the record set step by step inside an in-memory db.
        temp_db = TinyDB(storage=MemoryStorage)
        if date:
            records = file_index_db.search(files_query.date == date)
            temp_db.insert_multiple(records)
        if sample:
            records = temp_db.search(files_query.sample == sample)
            temp_db.purge()
            temp_db.insert_multiple(records)
        if energy:
            records = temp_db.search(files_query.energy == energy)
            temp_db.purge()
            temp_db.insert_multiple(records)
        file_index_db = temp_db

    root_path = os.path.dirname(os.path.abspath(file_index_fn))

    file_records = file_index_db.all()
    #print(file_records)

    # Collect the distinct (date, sample, energy[, jj_u, jj_d]) groups.
    dates_samples_energies = []
    for record in file_records:
        data = (record["date"], record["sample"], record["energy"])
        if jj is True:
            data += (record["jj_u"], record["jj_d"])
        dates_samples_energies.append(data)

    dates_samples_energies = list(set(dates_samples_energies))
    num_files_total = 0
    for date_sample_energy in dates_samples_energies:
        date = date_sample_energy[0]
        sample = date_sample_energy[1]
        energy = date_sample_energy[2]

        # Raw image records by given date, sample and energy
        query_cmd = ((files_query.date == date) &
                     (files_query.sample == sample) &
                     (files_query.energy == energy) &
                     (files_query.FF == False))
        if jj is True:
            jj_u = date_sample_energy[3]
            jj_d = date_sample_energy[4]
            query_cmd &= ((files_query.jj_u == jj_u) &
                          (files_query.jj_d == jj_d))

        if query is not None:
            query_cmd &= query

        h5_records = file_index_db.search(query_cmd)
        # FF records by given date, sample and energy

        query_cmd_ff = ((files_query.date == date) &
                        (files_query.sample == sample) &
                        (files_query.energy == energy) &
                        (files_query.FF == True))

        if jj is True:
            jj_u = date_sample_energy[3]
            jj_d = date_sample_energy[4]
            query_cmd_ff &= ((files_query.jj_u == jj_u) &
                             (files_query.jj_d == jj_d))

        h5_ff_records = file_index_db.search(query_cmd_ff)
        files = get_file_paths(h5_records, root_path)
        #print(files)
        n_files = len(files)
        num_files_total += n_files
        files_ff = get_file_paths(h5_ff_records, root_path)

        if not files_ff:
            msg = "FlatFields are not present, images cannot be normalized"
            raise Exception(msg)

        # print("------------norm")
        # import pprint
        # prettyprinter = pprint.PrettyPrinter(indent=4)
        # prettyprinter.pprint(files)
        # prettyprinter.pprint(files_ff)

        if average_ff:
            # Average the FF files and use always the same average (for a
            # same date, sample, energy and jj's)
            # Normally the case of magnetism
            if read_norm_ff is True:
                ff_norm_image = get_normalized_ff(files_ff)
            else:
                #print("---files ff")
                #print(files_ff)
                #print("---files")
                #print(files)
                # The first image is normalized serially to obtain the
                # averaged FF, then removed from the parallel batch.
                _, ff_norm_image = normalize_image(files[0],
                                                   ff_img_filenames=files_ff)
                files.pop(0)
            if len(files):
                Parallel(n_jobs=cores,
                         backend="multiprocessing")(delayed(normalize_image)(
                             h5_file, average_normalized_ff_img=ff_norm_image)
                                                    for h5_file in files)
        else:
            # Same number of FF as sample data files
            # Normalize each single sample data image for a single FF image
            # Normally the case of spectrocopies
            # TODO
            pass

    print("--- Normalize %d files took %s seconds ---\n" %
          (num_files_total, (time.time() - start_time)))

    db.close()
Ejemplo n.º 37
0
def average_ff(file_index_fn,
               table_name="hdf5_proc",
               date=None,
               sample=None,
               energy=None,
               cores=-2,
               query=None,
               jj=False):
    """Average the FlatField (FF) images of one experiment.

    For every distinct (date, sample, energy[, jj_u, jj_d]) group found
    in the index, the FF records are located and handed to
    ``normalize_ff``.

    :param file_index_fn: TinyDB index file of the experiment
    :param table_name: table inside the index db (None uses the default)
    :param date: restrict processing to this acquisition date
    :param sample: restrict processing to this sample name
    :param energy: restrict processing to this energy
    :param cores: currently unused; kept for API symmetry with
        normalize_images
    :param query: currently unused; kept for API symmetry with
        normalize_images
    :param jj: also group records by the jj_u/jj_d slit positions
    """
    file_index_db = TinyDB(file_index_fn,
                           storage=CachingMiddleware(JSONStorage))
    # Keep the root handle so the cache can be flushed/closed at the end.
    db = file_index_db
    if table_name is not None:
        file_index_db = file_index_db.table(table_name)

    files_query = Query()
    if date or sample or energy:
        # Narrow the record set step by step inside an in-memory db.
        temp_db = TinyDB(storage=MemoryStorage)
        if date:
            records = file_index_db.search(files_query.date == date)
            temp_db.insert_multiple(records)
        if sample:
            records = temp_db.search(files_query.sample == sample)
            temp_db.purge()
            temp_db.insert_multiple(records)
        if energy:
            records = temp_db.search(files_query.energy == energy)
            temp_db.purge()
            temp_db.insert_multiple(records)
        file_index_db = temp_db

    root_path = os.path.dirname(os.path.abspath(file_index_fn))

    file_records = file_index_db.all()

    # Collect the distinct (date, sample, energy[, jj_u, jj_d]) groups.
    dates_samples_energies = []
    for record in file_records:
        data = (record["date"], record["sample"], record["energy"])
        if jj is True:
            data += (record["jj_u"], record["jj_d"])
        dates_samples_energies.append(data)

    dates_samples_energies = list(set(dates_samples_energies))
    for date_sample_energy in dates_samples_energies:
        date = date_sample_energy[0]
        sample = date_sample_energy[1]
        energy = date_sample_energy[2]

        # FF records by given date, sample and energy
        query_cmd_ff = ((files_query.date == date) &
                        (files_query.sample == sample) &
                        (files_query.energy == energy) &
                        (files_query.FF == True))

        if jj is True:
            jj_u = date_sample_energy[3]
            jj_d = date_sample_energy[4]
            query_cmd_ff &= ((files_query.jj_u == jj_u) &
                             (files_query.jj_d == jj_d))

        h5_ff_records = file_index_db.search(query_cmd_ff)
        files_ff = get_file_paths(h5_ff_records, root_path)
        normalize_ff(files_ff)

    # Bug fix: close the cached db (as normalize_images does), so the
    # CachingMiddleware flushes and the file handle is released.
    db.close()
Ejemplo n.º 38
0
                print("END OF TOURNAMENT")
            tournamentdb.insert(newtournament)
        elif len(tournamentdb.search(existingTID.TID == newhand["TID"])) == 1:
            handcount = tournamentdb.search(existingTID.TID == newhand["TID"])[0]["handcount"] + 1
            if lasthand == True:
                print("END OF TOURNAMENT")
                tournamentdb.update({"duration": time_difference(newtournament["time"], newhand["time"])}, existingTID.TID == newhand["TID"])
                tournamentdb.update({"result": result}, existingTID.TID == newhand["TID"])
            tournamentdb.update({"handcount": handcount}, existingTID.TID == newhand["TID"])


        # # calculate end time and determine place
        # duration = time_difference(newtournament["time"], lasttime)# start time - end time in seconds.
        # newtournament["duration"] = duration
        # newtournament["handcount"] = handcount + 1
        # newtournament["buyin"] = newhand["buyin"]
        # newtournament["rake"] = newhand["rake"]
        # print(newtournament) # insert tournament into dataset
        # # create new tournament
        # lastTID = newhand["TID"]
        # newtournament = {"ID": lastTID, "time":newhand["time"]}
        # lasttime = newhand["time"]


# Time a full parse of one hand-history file and report throughput.
start = time.time()
parse_file("may21-may29.txt")
end = time.time() - start
print("\ntotal seconds: " + str(end))
handdb = TinyDB("handdb.json")
# Seconds per hand: the hand db length equals the number of parsed hands.
print("time per hand: " + str(end/len(handdb.all())))
Ejemplo n.º 39
0
import simplejson as json
from tinydb import TinyDB, Query, where

# Open the scraped database (absolute Windows path).
db = TinyDB('C:/python_Webcroling/section5/databases/database1.db')

# db.insert({'name':'kim','email':'*****@*****.**'}) #json(dict) {}
# db.insert_multiple([{'name':'lee','email':'*****@*****.**'},{'name':'park','email':'*****@*****.**'}]) #jsonArray(dict) [{},{},{}]

SQL = Query()

el = db.get(SQL.name == 'kim')

# NOTE(review): assumes the 'kim' record exists — db.get returns None
# otherwise, and el.doc_id would then raise AttributeError.
print(el)
print(el.doc_id)

# Update a record by document id
db.update({'email': '*****@*****.**'}, doc_ids=[1])

# Upsert: update the record if it exists, otherwise insert it
db.upsert({'email': 'tesdf@naver', 'login': True}, SQL.name == 'park')

# Delete records (examples, kept commented out)
# db.remove(doc_ids=[1,3])
# db.remove(SQL.name=='park')

# Fetch all records
print(db.all())
Ejemplo n.º 40
0
from tinydb import TinyDB, Query
# Open the world db and print its first record.
db = TinyDB('World.json')

print(db.all()[0])
Ejemplo n.º 41
0
class Blockchain():
    """TinyDB-backed toy blockchain with a built-in miner.

    Blocks live as JSON documents in blks.json; spendable outputs are
    tracked in a separate Utxos pool.  The miner key pair and address
    are persisted in miner_key / miner_address.txt and generated on the
    first run.
    """
    def __init__(self):
        self.db = TinyDB('blks.json')
        self.utxo_pool = Utxos()
        self.utxo_pool.update_pool([])  ##############################
        # Starting difficulty target in compact "bits" form.
        self.bits = 0x33222222
        try:
            # WIF keys are 51 chars, base58 addresses 34 chars.
            with open("miner_key", "r") as f:
                self.miner_wif = f.read(51)
            with open("miner_address.txt", "r") as f2:
                self.address = f2.read(34)
        except:
            # NOTE(review): bare except — any failure (not just a
            # missing file) silently regenerates a brand-new miner key.
            with open("miner_key", "w") as f:
                privkey = wallet.gen_privkey()
                self.miner_wif = wallet.privkey_to_wif(privkey)
                f.write(self.miner_wif + "\n")
            with open("miner_address.txt", "w") as f2:
                self.address = wallet.gen_address(
                    wallet.get_pubkey_str(privkey))
                f2.write(self.address + "\n")

    def mine(self):
        """Mine one block from up to three pending transactions."""
        if self.height() == 0:
            return self.genesis_block()
        txs = pending_pool.get_first3()
        fee = 0
        if len(txs) > 0:
            fee = (len(txs)) * tx_fee
            print(f'fee = {fee}')
        # The coinbase pays the block reward plus collected fees and
        # must be the first transaction in the block.
        coinbase_tx = form_coinbase(self.address, self.miner_wif,
                                    self.get_current_reward() + fee)
        serialized_cb = Serializer().serialize(coinbase_tx)
        txs.insert(0, serialized_cb)
        b = Block(time.time(), self.prev_hash(), txs, 0, self.bits).mine()
        b.height = self.height()
        print("Congratulations! Block " + b.toJSON() + " was mined!")
        self.db.insert({
            'Block Size': 0xffffffff,
            'Version': 1,
            'Previous Block Hash': b.prev_hash,
            'Merkle Root': b.merkle,
            'Timestamp': int(b.timestamp),
            'Difficulty Target': hex(b.bits),
            'Nonce': b.nonce,
            'Transaction Counter': len(b.txs),
            'Transactions': b.txs
        })
        # Re-target the difficulty every 5 blocks.
        if self.height() % 5 == 0:
            self.recalculate_bits()
        self.utxo_pool.update_pool(txs)

    def genesis_block(self):
        """Mine the very first block (coinbase transaction only)."""
        serialized_cb = Serializer().serialize(
            form_coinbase(self.address, self.miner_wif,
                          self.get_current_reward()))
        txs = [serialized_cb]
        b = Block(time.time(), first_prev_txid, txs, 0, self.bits).mine()
        print("Congratulations! Block " + b.toJSON() + " was mined!")
        self.db.insert({
            'Block Size': 0xffffffff,
            'Version': 1,
            'Previous Block Hash': b.prev_hash,
            'Merkle Root': b.merkle,
            'Timestamp': int(b.timestamp),
            'Difficulty Target': hex(b.bits),
            'Nonce': b.nonce,
            'Transaction Counter': len(b.txs),
            'Transactions': b.txs
        })
        self.utxo_pool.update_pool(txs)

    def resolve_conflicts(self):
        """Adopt the longest chain among the known peer nodes."""
        nodes_list = self.get_nodes_list()
        if len(nodes_list) == 0:
            print("There are no nodes in the list")
            return
        longest_chain_url = nodes_list[0]
        longest_length = 0
        for node in nodes_list:
            cur_len = requests.get("http://" + node + "/chain/length")
            cur_len = int(cur_len.json())
            if longest_length < cur_len:
                longest_chain_url = node
                longest_length = cur_len
        chain = requests.get("http://" + longest_chain_url + "/chain").json()
        if self.height() < longest_length:  #check
            # Truncate the local chain and utxo files, then replay the
            # remote chain block by block.
            open("blks.json", "w").close()
            open("utxo.json", "w").close()
            for c in chain:
                print("BLOCK----> " + str(c))
                self.db.insert(c)
                self.utxo_pool.update_pool(c['Transactions'])

    def recalculate_bits(self):
        """Re-target difficulty from the time the last 5 blocks took."""
        print(f'Old bits: {self.bits}')
        # Requires at least 5 stored blocks (caller invokes this on
        # every 5th block).
        diff_time = self.db.all()[-1]['Timestamp'] - self.db.all(
        )[-5]['Timestamp']
        print(f'diff time: {diff_time}')
        # Clamp so one outlier cannot swing the difficulty too far.
        if diff_time > max_time:
            diff_time = max_time
        elif diff_time < min_time:
            diff_time = min_time
        new_target = (diff_time * count_target(self.bits)) // max_time
        if new_target > max_target:
            new_target = max_target
        self.bits = to_bitsn(new_target)
        print(f'New bits: {self.bits}')

    def is_valid_chain(self):
        """Verify hashes, merkle roots and back-links over the chain."""
        prev = block_from_JSON(1)
        if prev.hash != prev.calculate_hash() \
            or prev.merkle != merkle.calculate(prev):
            return False
        for i in range(2, self.height() + 1):
            blk = block_from_JSON(i)
            if blk.hash != blk.calculate_hash() \
            or blk.merkle != merkle.calculate(blk) \
            or blk.prev_hash != prev.hash:
                return False
        return True

    def get_nodes_list(self):
        """Return the peer list from nodes.txt ([] when unreadable)."""
        try:
            f = open("nodes.txt", "r")
            lines = f.readlines()
            nodes = []
            for l in lines:
                nodes.append(l.replace("\n", ""))
            f.close()
            return nodes
        except:
            # NOTE(review): bare except — treats any error as "no nodes".
            return []

    def add_node(self, ip):
        """Append ip:port to nodes.txt after a loose format check."""
        if not re.match(r'(\d{1,3}\.){3}\d{1,3}:\d{4}', ip):
            print(
                "Please, enter node in format: ip:port (ex.: 192.12.0.1:5050)")
            return
        with open("nodes.txt", "a+") as f:
            f.write(ip + "\n")
        print(str(ip) + " was added to list of nodes.")

    def submit_tx(self, route, tx):
        """POST a transaction to another node."""
        requests.post(route, json=tx)

    def prev_hash(self):
        """Return the hash of the current chain tip."""
        prev_block = block_from_JSON(self.height() - 1)
        prev_hash = prev_block.calculate_hash()
        return prev_hash

    def height(self):
        """Return the number of blocks stored in the db."""
        height = len(self.db)
        return height

    def get_difficulty(self):
        # NOTE(review): self.db.all()[self.height()] indexes one past the
        # last record, and blocks are stored under 'Difficulty Target',
        # not 'difficulty' — this looks like it would raise; confirm.
        diff = self.db.all()[self.height()]['difficulty']
        return diff

    def get_current_reward(self):
        """Return the block reward, halved every 5 blocks."""
        denominator = math.pow(2, self.height() // 5)
        return int(g_miner_reward / denominator)

    def get_block_info(self, data):
        """Return block JSON by height (numeric string) or by hash."""
        if data.isnumeric():
            blk = block_from_JSON(int(data))
            if blk:
                return blk.toJSON()
        for i in range(self.height()):
            blk = block_from_JSON(i)
            if blk and blk.hash == data:
                return blk.toJSON()
        print(f'Block with hash {data} doesnt exist.')
import signal
import time
import sys
import requests
import uuid
import json
import ast

from pirc522 import RFID
from tinydb import TinyDB, Query
from ast import literal_eval

# https://tinydb.readthedocs.io/en/latest/getting-started.html
# https://pcmweb.nl/artikelen/programmeren/bouw-je-eigen-sommentelefoon-met-een-raspberry-pi/?article-page=1
db = TinyDB('/home/pi/test/db.json')
db.all()

# Dump every record for debugging.
# Bug fix: the original used the Python 2 print statement
# ("print str((item))"), which is a SyntaxError under Python 3.
for item in db:
    print(str(item))

Card = Query()
search = db.search(Card.tag == 70246357)
names = [r['name'] for r in search]
# Round-trip through JSON to coerce TinyDB values into plain str/list types.
names = ast.literal_eval(json.dumps(names))

#print (names[0])
#names.encode("utf-8")

run = True
rdr = RFID()
util = rdr.util()
Ejemplo n.º 43
0
def load_cookies() -> Dict[str, Any]:
    """Load the first stored cookie record, or an empty dict.

    Bug fix: the original ``db.all()[0] or {}`` raised IndexError on an
    empty database — the ``or {}`` fallback could never run.  The db
    handle is also closed so repeated calls do not leak it.
    """
    db = TinyDB(path.join(DATA_DIR, 'cookies.json'))
    try:
        records = db.all()
    finally:
        db.close()
    return records[0] if records else {}
Ejemplo n.º 44
0
import rq_dashboard
from worker_functions import start_check
import subprocess
import serial

r = redis.Redis()
# 144000 s = 40 h timeout for long-running jobs.
q = Queue(connection=r, default_timeout=144000)

app = Flask(__name__)
app.config.from_object(rq_dashboard.default_settings)
app.register_blueprint(rq_dashboard.blueprint, url_prefix="/rq")

db = TinyDB('db.json')
# lock to control access to variable

# Background score checker; assumes db.all()[0] holds the game id —
# raises IndexError on an empty db (TODO confirm intended seed data).
th = threading.Thread(target=ScoreChecker, args=(db.all()[0]['id'], ))


def start_checking_thread():
    """Start the module-level score-checking thread (can run only once)."""
    th.start()


def stop_checking_thread():
    """Placeholder: stopping the checker thread is not implemented yet."""
    pass


@app.route('/')
def hello_world():

    records = db.all()
    #checking_score = db.table("game").all()[0]["checking_score"]
Ejemplo n.º 45
0
class FileHandler():
    """Persistence layer around three TinyDB JSON files: machinery
    categories, adjusters and simulation (utilization) results.
    """

    # Private variables (name-mangled file paths)
    __machineryFilePath = os.path.abspath("./data/machinery.json")

    __adjusterFilePath = os.path.abspath("./data/adjuster.json")

    __utilizationFilePath = os.path.abspath("./data/simulationResult.json")

    def __init__(self, *args, **kwargs):
        self.mcDB = TinyDB(self.__machineryFilePath)
        self.adDB = TinyDB(self.__adjusterFilePath)
        self.utDB = TinyDB(self.__utilizationFilePath)

    def close_db(self):
        """Close all three db handles (flushes pending writes)."""
        self.mcDB.close()
        self.adDB.close()
        self.utDB.close()

    def readAll(self):
        """Return every adjuster and machinery record.

        :returns: dict with keys 'adj' and 'mch', each a list of records
        """
        return {
            "adj": self.adDB.all(),
            "mch": self.mcDB.all(),
        }

    def writeToFile(self, name, data):
        """Insert *data* into the store selected by *name* (_ADJ or _MCH).

        Adjusters get their TinyDB doc id copied into an 'ident' field
        so they can be looked up and deleted by it later.

        :returns: the new record's doc id (int), or None for an unknown
            store name (bug fix: the original raised UnboundLocalError)
        """
        success = None
        if name == _ADJ:
            success = self.adDB.insert(data)
            self.adDB.update({'ident': success}, doc_ids=[success])
        elif name == _MCH:
            success = self.mcDB.insert(data)
        return success

    def deleteAll(self):
        """Wipe every table in all three db files."""
        self.adDB.purge_tables()
        self.mcDB.purge_tables()
        self.utDB.purge_tables()

    def deleteRow(self, store, id):
        """Delete one record: adjusters by 'ident', machinery by 'name'."""
        if store == _ADJ:
            self.adDB.remove(where('ident') == id)
        elif store == _MCH:
            self.mcDB.remove(where('name') == id)

    def searchAdjuster(self, id):
        """Return adjuster records whose 'ident' equals int(id)."""
        adjuster = Query()
        return self.adDB.search(adjuster.ident == int(id))

    def searchCategory(self, name):
        """Return machinery records with the given category name."""
        category = Query()
        return self.mcDB.search(category.name == name)

    def updateCategoryFile(self, id, new_name, quantity, MTTF):
        """Update one machinery category record by doc id.

        :returns: TinyDB update result (list of updated doc ids)
        """
        return self.mcDB.update(
            {
                "name": new_name,
                "quantity": int(quantity),
                "MTTF": float(MTTF)
            },
            doc_ids=[id])

    # TODO: not implemented yet
    def updateAdjusterFile(self, option, name, qunatity, MTTF):
        return ""  # int doc id expected once implemented

    # TODO: not implemented yet
    def updateUtilization(self, option, list):
        return ""  # int doc id expected once implemented
Ejemplo n.º 46
0
def parse_hands(hands):
    """Parse PokerStars Spin & Go hand histories into TinyDB.

    Each element of *hands* is the raw text of one hand.  Hands are
    appended to handdb.json; per-tournament aggregates (handcount,
    duration, result) are maintained in tournamentdb.json.  Non
    Spin&Go hands, duplicates and hands that fail to parse are skipped
    with a console message.
    """
    handdb = TinyDB("handdb.json") # initialize DB
    #handdb.truncate() # clear entire DB
    tournamentdb  = TinyDB("tournamentdb.json")
    #tournamentdb.truncate() # clear entire DB
    for hand in hands:
        newhand = {}
        hand = hand.split("\n")

        # Only keep SNG JackPot (Spin & Go) tournament hands.
        if hand[1].split()[7:10] != ['(SNG', 'JackPot', 'Tournament']:
            print(hand[1].split()[7:10])
            print("not spin & go")
            print(hand)
            continue
        newhand["ID"] = hand[0].split()[5]

        # check for duplicate hand
        if len(handdb.all()) != 0:
            existinghand = Query()
            if len(handdb.search(existinghand.ID == newhand["ID"])) > 0:
                print("duplicate hand... ignoring")
                continue

        print(newhand["ID"]) # print hand ID for debugging purposes

        # Fixed token positions in the hand-history header line.
        newhand["TID"] = hand[1].split()[10][1:-1]
        newhand["buyin"] = hand[1].split()[12][1:]
        newhand["rake"] = hand[1].split()[14][1:-1]
        newhand["stakes"] = hand[1].split()[0]
        newhand["time"] = hand[1].split()[17:]
        newhand["players"] = hand[3].split()[5][0]
        if newhand["players"] == "2":
            # Heads-up: the seat line fixes Hero's position (SB or BB).
            if hand[6].startswith("Hero"):
                newhand["position"] = "SB"
            elif hand[7].startswith("Hero"):
                newhand["position"] = "BB"

            try:
                lasthand, cards, net, showdown, allin, cev, result = twohanded_action(hand[4:])
            except:
                # NOTE(review): bare except hides the real parse error.
                print("error parsing hand ID: "+ newhand["ID"] +  "  ... skipping")
                continue
        elif newhand["players"] == "3":
            if hand[7].startswith("Hero"):
                newhand["position"] = "SB"
            elif hand[8].startswith("Hero"):
                newhand["position"] = "BB"
            else:
                newhand["position"] = "BU"

            try:
                lasthand, cards, net, showdown, allin, cev, result = threehanded_action(hand[4:])
            except:
                # NOTE(review): bare except hides the real parse error.
                print("error parsing hand ID: "+ newhand["ID"] +  "  ... skipping")
                continue
        newhand["cards"] = cards
        newhand["net"] = net
        newhand["showdown"] = showdown
        newhand["cev"] = cev

        # insert hand into DB
        handdb.insert(newhand)
        # query existing tournament database and check if tournament already exists
        existingTID = Query()
        if (tournamentdb.search(existingTID.TID == newhand["TID"])) == []:
            # First hand of a new tournament: create the aggregate record.
            newtournament = {}
            newtournament["TID"] = newhand["TID"]
            newtournament["handcount"] = 1
            newtournament["buyin"] = newhand["buyin"]
            newtournament["rake"] = newhand["rake"]
            newtournament["time"] = newhand["time"]
            newtournament["result"] = "unfinished"
            newtournament["duration"] = "unfinished"
            if lasthand == True:
                newtournament["duration"] = "0"
                newtournament["result"] = result
                print("END OF TOURNAMENT")
            tournamentdb.insert(newtournament)
        elif len(tournamentdb.search(existingTID.TID == newhand["TID"])) == 1:
            handcount = tournamentdb.search(existingTID.TID == newhand["TID"])[0]["handcount"] + 1
            if lasthand == True:
                print("END OF TOURNAMENT")
                # NOTE(review): 'newtournament' here comes from a previous
                # loop iteration (the tournament's first hand) — confirm
                # hands always arrive grouped per tournament.
                tournamentdb.update({"duration": time_difference(newtournament["time"], newhand["time"])}, existingTID.TID == newhand["TID"])
                tournamentdb.update({"result": result}, existingTID.TID == newhand["TID"])
            tournamentdb.update({"handcount": handcount}, existingTID.TID == newhand["TID"])
Ejemplo n.º 47
0
class ATX_Server(object):
    """
    Select devices from an atx/stf server according to user requirements.

    The device list is fetched from the server's ``/list`` endpoint and held
    in an in-memory TinyDB table which the ``find``/``*_devices`` helpers query.
    """

    def __init__(self, url):
        """
        Construct method.

        :param url: address of the stf server, e.g. ``10.0.0.1:8000`` —
            a ``http://`` prefix and ``/list`` suffix are appended if missing.
        """
        self._db = TinyDB(storage=MemoryStorage)
        if url and re.match(r"(http://)?(\d+\.\d+\.\d+\.\d+:\d+)", url):
            if '://' not in url:
                url = 'http://' + url + '/list'
            else:
                url = url + '/list'
            self._url = url
            self.load()
        else:
            # No self._url is set in this branch, so load()/refresh() would fail.
            logger.error('Atx server addr error')
        # BUG FIX: the original unconditionally called self.load() again here,
        # which inserted the device list twice on success and raised
        # AttributeError (self._url never set) when the address was invalid.

    def load(self):
        """
        Use the data which got from the stf platform to create the query db.

        :return: the number of records inserted into the db's table
        """
        res = requests.get(self._url).json()
        if res is not None:
            eids = self._db.insert_multiple(res)
            return len(eids)
        else:
            return 0

    def find(self, cond=None):
        """
        Filter the stored devices in place according to a condition.

        :param cond: condition to filter devices
        :type cond: where
        :return: self, whose db then contains only the matching devices
        """
        if cond is not None:
            res = self._db.search(cond)
            self.purge()
            self._db.insert_multiple(res)
        return self

    def devices(self):
        """
        Return all devices that meet the requirement.

        :return: list of devices
        """
        return self._db.all()

    def refresh(self):
        """
        Reload the devices info from stf.

        :return: the number of records inserted into the db's table
        """
        self.purge()
        return self.load()

    def count(self):
        """
        Count the records in the db's table.

        :return: the number of records in the db's table
        """
        return len(self._db.all())

    def purge(self):
        """
        Remove all the data from the db.

        :return:
        """
        self._db.purge()

    def _select(self, cond=None):
        """Refresh from stf, filter by *cond* and return the matches.

        :return: the list of matching devices, or False when there are none
            (False is kept for backward compatibility with existing callers)
        """
        self.refresh()
        devices = self.find(cond).devices()
        if len(devices) > 0:
            return devices
        return False

    def ready_devices(self):
        '''Find devices flagged as ready.'''
        return self._select(where('ready') == True)

    def online_devices(self):
        '''Find devices that are currently present (online).'''
        return self._select(where('present') == True)

    def model_devices(self, model):
        '''Find devices of a specific model.'''
        return self._select(where('model') == model)

    def brand_devices(self, brand):
        '''Find devices of a specific brand.'''
        return self._select(where('brand') == brand)

    def sdk_devices(self, sdk):
        '''Find devices with a specific SDK level.'''
        return self._select(where('sdk') == sdk)

    def version_devices(self, version):
        '''Find devices with a specific version.'''
        return self._select(where('version') == version)

    def serial_devices(self, serial):
        '''Find devices with a specific serial number.'''
        return self._select(where('serial') == serial)

    def all_devices(self):
        '''Return all devices known to the server.'''
        return self._select()
Ejemplo n.º 48
0
    reddit = praw.Reddit(config.bot_config)
    print "Current gold credit  => " + str(reddit.user.me().gold_creddits)
else:
    print "Formater used for analysis is " + args.formater

    # check users history, list file in path
    history_path = config.history_path
    only_files = [f for f in listdir(history_path) if isfile(join(history_path, f))]

    for username in only_files:
        clean_user = username.replace('.json', '')

        u = models.User(clean_user)

        db = TinyDB(config.history_path + clean_user + '.json')
        data_histo = db.all()
        for row in data_histo:

            date_check = datetime.datetime.strptime(row['time'], '%Y-%m-%dT%H:%M:%S.%f')
            key = date_check.strftime(args.formater)

            # Compute registration
            if u.is_registered():
                if clean_user not in list_of_user['all']:
                    list_of_user['all'].append(clean_user)

                if key not in list_of_user.keys():
                    list_of_user[key] = []

                if clean_user not in list_of_user[key]:
                    list_of_user[key].append(clean_user)
Ejemplo n.º 49
0
from tinydb import TinyDB, Query

# Open the JSON-backed database file.
db = TinyDB("database.json")

# Delete every record whose username field equals "Azim".
user = Query()
db.remove(user.username == "Azim")

# Show everything that remains in the database.
remaining = db.all()
print(remaining)
Ejemplo n.º 50
0
# StringVar backing the mail entry box (ur/tk are defined earlier in the file).
ml = StringVar()

# entry boxes
url = Entry(window, width=38, textvariable=ur)
url.grid(row=1, column=2, sticky=W, padx=5, pady=5)
token = Entry(window, width=38, textvariable=tk)
token.grid(row=3, column=2, sticky=W, padx=5, pady=5)
mail = Entry(window, width=38, textvariable=ml)
mail.grid(row=4, column=2, sticky=W, padx=5, pady=5)

# buttons
Button(window, text="Generate xlsx", width=15, command=start).grid(row=6, column=2)
Button(window, text="Reset", width=15, command=end_t).grid(row=6, column=1)

# status bar
progress = Progressbar(window, orient=HORIZONTAL, length=390, mode='determinate')
progress.grid(row=7, column=1, columnspan=2, padx=5, pady=5, sticky=W)

# log listbox
logs = Listbox(window, width=65)
logs.grid(row=8, column=1, columnspan=2, padx=2, pady=2, sticky=W)
logs.insert(0, "Waiting to start")  # BUG FIX: message previously read "Waiting to srart"

# Auto-fill saved settings into the entry boxes.
# Hoisted db.all() out: the original re-read the whole table four times.
saved = db.all()
if len(saved) != 0:
    ur.set(saved[0]['url'])
    tk.set(saved[0]['token'])
    ml.set(saved[0]['mail'])


window.mainloop()

Ejemplo n.º 51
0
    'percentOwnership': 1,
    'thresholds': []
})

# insert BCE (common stock bought 1997-12-12, priced via Yahoo ticker BCE.TO)
db.insert({
    'assetID': 'BCE',
    'assetType': 'COMMON',
    'purchaseDate': '1997-12-12',
    'purchasePrice': 2.858,
    'volume': 179,
    'saleDate': None,
    'salePrice': None,
    'priceFeedType': 'YAHOO',
    'priceFeedRef': 'BCE.TO',
    'debtFeedType': None,
    'debtFeedRef': None,
    'percentOwnership': 1,
    'thresholds': []
})

allAssets = db.all()

# Print each stored asset record.
# BUG FIX: the original used Python-2-only "print x" statements; the
# parenthesized single-argument form behaves identically on Python 2
# and is also valid Python 3.
for ass in allAssets:
    print(ass)

frame = pd.DataFrame(allAssets)

print(frame)
Ejemplo n.º 52
0
#%%
import base64
from tinydb import TinyDB, Query
import os

# %%
db = TinyDB(os.path.join('/mnt/c/pcloud_ben/personal_project/quiz', 'question.json'))


#%%
# Base64-encode every question body and choice body in place, replacing
# each stored record with its encoded copy.
Question = Query()  # hoisted: the original rebuilt a Query() on every iteration
for q in db.all():
    q['question_body'] = base64.b64encode(q['question_body'].encode()).decode('utf-8')
    for ch in q['choices']:
        ch['choice_body'] = base64.b64encode(ch['choice_body'].encode()).decode('utf-8')
    # remove the old record, then insert the encoded one
    db.remove(Question.id == q["id"])
    db.insert(q)
Ejemplo n.º 53
0
class devices(object):
    """In-memory mock inventory of OLT/PON/ONU/UNI devices backed by TinyDB.

    Each device category lives in its own JSON-backed table. The create_*
    methods generate mock records, and the update_*_id methods write back the
    IDs assigned by XOS so that later-created records can reference them.
    """

    def __init__(self):
        # One TinyDB file per device category.
        self.olts = TinyDB('olts.json')
        self.pon_ports = TinyDB('pon_ports.json')
        self.onus = TinyDB('onus.json')
        self.uni_ports = TinyDB('uni_ports.json')

    def get_mock_data(self):
        """
        Get all the mock data,
        this method is mostly intended for debugging
        :return: a dictionary containing all the mocked data
        """
        olts = self.olts.all()
        pon_ports = self.pon_ports.all()
        onus = self.onus.all()
        uni_ports = self.uni_ports.all()
        return {
            'olts': olts,
            'pon_ports': pon_ports,
            'onus': onus,
            'uni_ports': uni_ports,
        }

    def clean_storage(self):
        """Empty all four device tables."""
        self.olts.purge()
        self.pon_ports.purge()
        self.onus.purge()
        self.uni_ports.purge()

###############################################################
#                             OLT                             #
###############################################################

    def create_mock_olts(self, num_olts, voltservice_id):
        """
        :param num_olts: Number of OLTs to be created
        :param voltservice_id: ID if the vOLT service
        :return: the list of OLT dicts created (also persisted in self.olts)
        """
        olts = []
        for index in range(1, int(num_olts) + 1):
            olt = {
                'name': 'Test OLT%s' % index,
                'volt_service_id': voltservice_id,
                'device_type': 'fake_olt',
                'host': '127.0.0.1',
                'port': index,
                'uplink': '65536',
                'switch_datapath_id': 'of:0000000000000001',
                'switch_port': str(index),
                'of_id': 'of:000000%s' % index,
                'dp_id': 'of:000000%s' % index,
            }
            # logger.info('Created OLT %s' % olt, also_console=True)
            olts.append(olt)
            self.olts.insert(olt)

        return olts

    def get_rest_olts(self):
        """
        Get all the OLTs that have been created for the test
        formatted for the XOS Rest API
        :return: a list of OLTs
        """
        return self.olts.all()

    def update_olt_id(self, olt, id):
        """
        Update in the memory storage the XOS ID
        of a particular OLT as it's needed to create PON Ports
        :param olt: The OLT object to update
        :param id:  The ID returned from XOS
        :return: None
        """
        Olt = Query()
        self.olts.update({'id': id}, Olt.name == olt['name'])

###############################################################
#                        PON PORT                             #
###############################################################

    def create_mock_pon_ports(self, num_pon):
        # Creates num_pon ports for EVERY stored OLT; requires update_olt_id
        # to have run first so each olt has an 'id'.
        ports = []
        for olt in self.olts.all():
            for index in range(1, int(num_pon) + 1):
                port = {
                    'name': 'Test PonPort %s Olt %s' % (index, olt['id']),
                    'port_no': index,
                    'olt_device_id': olt['id']
                }
                ports.append(port)
                self.pon_ports.insert(port)
        return ports

    def get_rest_pon_ports(self):
        """
        Get all the PON Ports that have been created for the test
        formatted for the XOS Rest API
        :return: a list of PON Ports
        """
        return self.pon_ports.all()

    def update_pon_port_id(self, pon_port, id):
        """
        Update in the memory storage the XOS ID
        of a particular PON Port as it's needed to create ONUs
        :param pon_port: The PON Port object to update
        :param id:  The ID returned from XOS
        :return: None
        """
        PonPort = Query()
        self.pon_ports.update({'id': id}, PonPort.name == pon_port['name'])

###############################################################
#                             ONU                             #
###############################################################

    def create_mock_onus(self, num_onus):
        """Create num_onus ONUs per stored PON port; serials are ROBOT<port><n>."""
        onus = []
        j = 0
        for port in self.pon_ports.all():
            j = j + 1
            for index in range(1, int(num_onus) + 1):
                onu = {
                    'serial_number': "ROBOT%s%s" % (j, index),
                    'vendor': 'Robot',
                    'pon_port_id': port['id']
                }
                onus.append(onu)
                self.onus.insert(onu)
        return onus

    def get_rest_onus(self):
        """Get all the ONUs created for the test."""
        return self.onus.all()

    def update_onu_id(self, onu, id):
        """Store the XOS-assigned ID on the ONU matched by serial number."""
        Onu = Query()
        self.onus.update({'id': id}, Onu.serial_number == onu['serial_number'])

###############################################################
#                             UNI                             #
###############################################################

    def create_mock_unis(self):
        # NOTE I believe UNI port number must be unique across OLT
        """Create one UNI port per stored ONU with a globally unique port_no."""
        unis = []
        i = 0
        for onu in self.onus.all():
            uni = {
                'name': 'Test UniPort %s' % i,
                'port_no': i,
                'onu_device_id': onu['id']
            }
            unis.append(uni)
            self.uni_ports.insert(uni)
            i = i + 1
        return unis

    def get_rest_unis(self):
        """Get all the UNI ports created for the test."""
        return self.uni_ports.all()

    def update_uni_id(self, uni, id):
        """Store the XOS-assigned ID on the UNI port matched by name."""
        UniPort = Query()
        self.uni_ports.update({'id': id}, UniPort.name == uni['name'])

###############################################################
#                             WHITELIST                       #
###############################################################

    def create_mock_whitelist(self, attworkflowservice_id):
        """Build (without persisting) one whitelist entry per stored ONU."""
        entries = []
        for onu in self.onus.all():
            e = {
                'owner_id': attworkflowservice_id,
                'serial_number': onu['serial_number'],
                'pon_port_id': self._find_pon_port_by_onu(onu)['port_no'],
                'device_id': self._find_olt_by_onu(onu)['of_id']
            }
            entries.append(e)

        return entries

###############################################################
#                             EVENTS                          #
###############################################################

    def generate_onu_events(self):
        """Build one 'activated' ONU event per stored ONU."""
        events = []
        for onu in self.onus.all():
            ev = {
                'status': 'activated',
                'serialNumber': onu['serial_number'],
                'portNumber': str(self._find_uni_by_onu(onu)['port_no']),
                'deviceId': self._find_olt_by_onu(onu)['of_id'],
            }
            events.append(ev)
        return events

    def generate_auth_events(self):
        """Build one APPROVED authentication event per stored ONU."""
        events = []
        for onu in self.onus.all():
            ev = {
                'authenticationState': "APPROVED",
                'deviceId': self._find_olt_by_onu(onu)['dp_id'],
                'portNumber': self._find_uni_by_onu(onu)['port_no'],
            }
            events.append(ev)
        return events

    def generate_dhcp_events(self):
        """Build one DHCPACK event per stored ONU."""
        events = []
        for onu in self.onus.all():
            ev = {
                'deviceId': self._find_olt_by_onu(onu)['dp_id'],
                'portNumber': self._find_uni_by_onu(onu)['port_no'],
                # NOTE(review): only 5 octets — looks like a malformed MAC
                # address; confirm whether "aa:bb:cc:dd:ee:ff" was intended.
                "macAddress": "aa:bb:cc:ee:ff",
                "ipAddress": "10.10.10.10",
                "messageType": "DHCPACK"
            }
            events.append(ev)
        return events

###############################################################
#                             HELPERS                         #
###############################################################

    def _find_uni_by_onu(self, onu):
        Uni = Query()
        # NOTE there's an assumption that 1 ONU has 1 UNI Port
        return self.uni_ports.search(Uni.onu_device_id == onu['id'])[0]

    def _find_pon_port_by_onu(self, onu):
        # this does not care about the olt id...
        PonPort = Query()
        return self.pon_ports.search(PonPort.id == onu['pon_port_id'])[0]

    def _find_olt_by_onu(self, onu):
        # Resolve ONU -> PON port -> owning OLT.
        pon_port = self._find_pon_port_by_onu(onu)
        Olt = Query()
        return self.olts.search(Olt.id == pon_port['olt_device_id'])[0]
Ejemplo n.º 54
0
Archivo: cart.py Proyecto: cbonoz/cARt
class Cart:
    """Shopping-cart helper: records items in TinyDB and creates PayPal
    payments for them through paypalrestsdk."""

    def __init__(self, port = 9001):
        # port is only used to build the local return_url for PayPal redirects
        self.port = port
        self.db = TinyDB('db/db.json')

    def create_payment(self, item, amount = 1):
        """Create (but not execute) a PayPal sale payment for *amount* units.

        :param item: dict with at least 'name' and 'price' keys
        :param amount: quantity purchased
        :return: the paypalrestsdk Payment object; on failure payment.error
            is printed and the (unsaved) object is still returned
        """
        payment = paypalrestsdk.Payment({
            "intent": "sale",
            "payer": {
                "payment_method": "paypal"},
            "redirect_urls": {
                "return_url": "http://localhost:%s/payment/execute" % self.port,
                "cancel_url": "http://localhost:3000/"},
            "transactions": [{
                "item_list": {
                    "items": [{
                        "name": item['name'],
                        "sku": str(uuid.uuid4()),
                        "price": item['price'],
                        "currency": "USD",
                        "quantity": amount}]},
                "amount": {
                    "total": item['price'] * amount,
                    "currency": "USD"},
                "description": "This is the payment transaction description."}]})
        if payment.create():
            print("Payment created successfully")
        else:
            print(payment.error)
        return payment

    def execute_payment(self, payment_id, payer_id='DUFRQ8GWYMJXC'):
        # ID of the payment. This ID is provided when creating payment.
        # NOTE(review): payer_id default is a hard-coded sandbox value —
        # confirm callers always pass the real PayerID in production.
        payment = Payment.find(payment_id)

        # PayerID is required to approve the payment.
        if payment.execute({"payer_id": payer_id}):  # return True or False
            print("Payment[%s] execute successfully" % (payment.id))
        else:
            print(payment.error)


    def get_payments(self, count = 10):
        """Return the most recent *count* payments from the PayPal API."""
        payment_history = Payment.all({"count": count}).payments
        return payment_history

    def get_data_file(self, filename):
        """Return the absolute path of *filename* inside DATA_DIR."""
        return os.path.join(DATA_DIR, filename)

    def record_item(self, item, payment=True):
        """Store *item* in the local db, optionally creating a PayPal payment.

        Missing 'user'/'converted' fields are filled with fake/random values
        (presumably for demo data — fake looks like a Faker instance; confirm).
        :return: the created payment object, or False when payment is False
        """
        if 'user' not in item:
            item['user'] = fake.name()

        if 'converted' not in item:
            item['converted'] = random.choice([True, False])
        res = False
        if payment:
            res = self.create_payment(item, 1)
        self.db.insert(item)
        return res

    def get_items(self):
        """Return every recorded item."""
        return self.db.all()
Ejemplo n.º 55
0
class ProgramEngine:
    """Singleton registry of user programs, backed by a TinyDB index and
    per-program JSON files under PROGRAM_PATH."""

    # pylint: disable=exec-used

    _instance = None

    def __init__(self):
        """Scan PROGRAM_PATH and register any program file not yet indexed."""
        self._program = None
        self._log = ""
        self._programs = TinyDB("data/programs.json")
        query = Query()
        # BUG FIX: the original loop body contained a stray no-op "dirnames"
        # expression; the unused variable is now conventionally underscored.
        for dirname, _dirnames, filenames in os.walk(PROGRAM_PATH):
            for filename in filenames:
                if PROGRAM_PREFIX in filename:
                    program_name = filename[len(PROGRAM_PREFIX):-len(PROGRAM_SUFFIX)]
                    if self._programs.search(query.name == program_name) == []:
                        logging.info("adding program %s in path %s as default %r", program_name, dirname, ("default" in dirname))
                        self._programs.insert({"name": program_name, "filename": os.path.join(dirname, filename), "default": str("default" in dirname)})

    @classmethod
    def get_instance(cls):
        """Lazily create and return the singleton engine."""
        if not cls._instance:
            cls._instance = ProgramEngine()
        return cls._instance

    def prog_list(self):
        """Return every stored program record."""
        return self._programs.all()

    def save(self, program):
        """Insert or update *program* in the index and write its JSON file.

        :param program: object exposing .name and .as_dict()
        """
        query = Query()
        self._program = program
        program_db_entry = program.as_dict()
        program_db_entry["filename"] = os.path.join(PROGRAM_PATH, PROGRAM_PREFIX + program.name + PROGRAM_SUFFIX)
        if self._programs.search(query.name == program.name) != []:
            self._programs.update(program_db_entry, query.name == program.name)
        else:
            self._programs.insert(program_db_entry)
        # with-statement closes the file even if json.dump raises
        # (the original used bare open()/close() and leaked on error).
        with open(program_db_entry["filename"], 'w+') as f:
            json.dump(program.as_dict(), f)

    def load(self, name):
        """Load the named program from disk into self._program and return it.

        Returns the previously loaded program unchanged when *name* is unknown.
        """
        query = Query()
        program_db_entries = self._programs.search(query.name == name)
        if program_db_entries != []:
            logging.info(program_db_entries[0])
            with open(program_db_entries[0]["filename"], 'r') as f:
                self._program = Program.from_dict(json.load(f))
        return self._program

    def delete(self, name):
        """Delete the named program's file and its index record, if present."""
        query = Query()
        program_db_entries = self._programs.search(query.name == name)
        if program_db_entries != []:
            os.remove(program_db_entries[0]["filename"])
            self._programs.remove(query.name == name)

    def create(self, name, code):
        """Create a new in-memory Program and make it the current one."""
        self._program = Program(name, code)
        return self._program

    def is_running(self, name):
        """True when the current program is *name* and it is running."""
        return self._program.is_running() and self._program.name == name

    def check_end(self):
        """Delegate to the current program's end-of-run check."""
        return self._program.check_end()

    def log(self, text):
        """Append *text* plus a newline to the engine log."""
        self._log += text + "\n"

    def get_log(self):
        """Return the accumulated log text."""
        return self._log
Ejemplo n.º 56
0
class Nekowat(object):
    """Attributes:

    _conf_path (str): Path to the configuration file.
    _conf (dict): Parsed configuration
    token (str): Telegram bot token
    owner (int): ID of the bot owner. The owner can do certain special actions
        such as adding or removing users from whitelist.
    use_whitelist (bool): Flag indicating whether the bot allows commands from
        any user (False) or only users defined in the whitelist (True)
    whitelist (dict): Users that are allowed to interact with the bot.
        It uses the following structure:

            {
                'user1': 123456789,
                'user2': 123456788
            }

    bot (TeleBot): TeleBot instance.
    db (TinyDB): Database instance.
    wat (Query): TinyDB query.
    """
    def init_bot(self, config_path=None, level='INFO'):
        """Initializer.

        Args:
            config_path (str): Path to the configuration file. If this is not
                provided, the bot expects the path to be available in the
                environment variable 'NEKOWAT_CONF'.
            level (str): Logging level to use in the internal logger of the bot.

        Note:
            Exits the whole process (sys.exit) when no config file is found.
        """
        # Parse configuration file
        if not config_path:
            config_path = os.path.abspath(os.getenv('NEKOWAT_CONF', ''))

        if not config_path or not os.path.isfile(config_path):
            sys.exit('Could not find configuration file')

        self._conf_path = config_path

        with open(config_path) as f:
            self._conf = json.load(f)

        # Bot settings
        self.token = self._conf['tg']['token']
        self.owner = self._conf['tg']['owner']
        self.use_whitelist = self._conf['tg']['use_whitelist']
        self.whitelist = self._conf['tg']['whitelist']

        # TinyDB
        #
        # Row structure:
        #
        # - name (str): Name of the file
        # - file_ids (list): List of file IDs ordered by size
        # - expressions (list): List of expressions that match the image
        self.db = TinyDB(self._conf['db'])
        # NOTE(review): SmartCacheTable appears to be a query-caching table
        # implementation from outside this file — confirm its origin.
        self.db.table_class = SmartCacheTable
        self.wat = Query()

        # Bot initialization
        telebot.logger.setLevel(level)
        self.bot = telebot.TeleBot(self.token,
                                   threaded=True,
                                   skip_pending=True)

        # Inherit bot methods
        # (aliases so callers can use this object like a TeleBot directly)
        self.answer_inline_query = self.bot.answer_inline_query
        self.inline_handler = self.bot.inline_handler
        self.message_handler = self.bot.message_handler
        self.register_next_step_handler = self.bot.register_next_step_handler
        self.reply_to = self.bot.reply_to
        self.send_message = self.bot.send_message
        self.send_photo = self.bot.send_photo

    def _save_conf(self):
        """Save configuration to file."""
        with open(self._conf_path, 'w') as f:
            json.dump(self._conf, f)

    def start(self):
        """Bot starter."""
        print('Start polling')
        self.bot.polling(none_stop=True)

    def stop(self):
        """Bot stopper."""
        print('Stop polling')
        self.bot.stop_polling()

    def is_owner(self, user_id):
        """Checks whether a message comes from the owner."""
        return user_id == self.owner

    def is_allowed(self, user_id):
        """Checks whether a message comes from a whitelisted user.

        Note that disabling the whitelist results in every user being able
        to communicate with the bot.
        """
        if not self.use_whitelist or user_id == self.owner:
            return True

        return user_id in self.whitelist.values()

    def add_whitelist(self, name, user_id):
        """Adds a user to the whitelist.

        This updates the configuration file.

        Args:
            name (str): Name of the user.
            user_id (int): User ID.

        Returns:
            Boolean indicating if the user was added or not.
        """
        if name in self.whitelist.keys():
            # Already exists
            return False

        self.whitelist[name] = user_id
        self._conf['tg']['whitelist'] = self.whitelist

        self._save_conf()

        return True

    def rm_whitelist(self, name):
        """Removes a user from the whitelist.

        This updates the configuration file.

        Args:
            name (str): Name of the user.

        Returns:
            Boolean indicating if the user was removed or not.
        """
        if name not in self.whitelist.keys():
            # Does not exist
            return False

        del self.whitelist[name]
        self._conf['tg']['whitelist'] = self.whitelist

        self._save_conf()

        return True

    def toggle_whitelist(self):
        """Toggle use of whitelist."""
        new_status = not self.use_whitelist

        self._conf['tg']['use_whitelist'] = new_status
        self.use_whitelist = new_status

        self._save_conf()

    def create_wat(self, name, file_ids):
        """Insert a new wat record in the database.

        Args:
            name (str): Name of the wat.
            file_ids (list[str]): List of file IDs in Telegram (ordered by size)
        """
        self.db.insert({'name': name, 'file_ids': file_ids, 'expressions': []})

    def get_all_wats(self):
        """Get all wats from the database.

        Returns:
            List of database rows (dicts with name/file_ids/expressions).
        """
        return self.db.all()

    def get_wats_by_expression(self, expression):
        """Get all rows that match an expression.

        Returns:
            List of database rows
        """
        return self.db.search(self.wat.expressions.any([expression]))

    def wat_exists(self, name):
        """Check whether a wat exists already."""
        wat = self.db.get(self.wat.name == name)

        if wat:
            return True

        return False

    def get_wat(self, name):
        """Get a WAT by name."""
        return self.db.get(self.wat.name == name)

    def set_wat_expressions(self, name, expressions):
        """Update a WAT and set the new expressions."""
        self.db.update({'expressions': expressions}, self.wat.name == name)

    def remove_wat(self, doc_id):
        """Remove a WAT by ID."""
        return self.db.remove(doc_ids=[
            doc_id,
        ])
Ejemplo n.º 57
0
def my_index():
    """Return every row of the "Zone" table as a JSON response."""
    zone_table = TinyDB("data.json").table("Zone")
    return jsonify({"data": zone_table.all()})
Ejemplo n.º 58
0
from tinydb import TinyDB

# Open the sensor-report database.
db = TinyDB('sensor_reports.json')

# Show the first five stored reports.
reports = db.all()
print(reports[:5])
Ejemplo n.º 59
0
def get_drafts():
    """Return records with a positive draft_user count, newest first.

    Returns:
        List of db rows sorted by 'pubdate_api' descending.
    """
    db = TinyDB(cfg.config['db_name'])
    try:
        # Keep only records someone has drafted.
        records = [x for x in db.all() if x['draft_user'] > 0]
    finally:
        # BUG FIX: the original left the db open when a record lacked
        # 'draft_user' (KeyError escaped before db.close()).
        db.close()
    return sorted(records, key=itemgetter('pubdate_api'), reverse=True)
Ejemplo n.º 60
0
def test_caching_read():
    # A fresh CachingMiddleware-wrapped MemoryStorage db must start empty.
    db = TinyDB(storage=CachingMiddleware(MemoryStorage))
    assert db.all() == []