def test_json_readwrite(tmpdir):
    """Regression test for issue #1."""
    db_path = str(tmpdir.join('test.db'))

    # Fresh TinyDB backed by the JSON storage under test.
    db = TinyDB(db_path, storage=JSONStorage)

    long_item = {'name': 'A very long entry'}
    short_item = {'name': 'A short one'}

    def lookup(name):
        return db.get(where('name') == name)

    # Insert/remove the long entry and verify both transitions.
    db.insert(long_item)
    assert lookup('A very long entry') == long_item
    db.remove(where('name') == 'A very long entry')
    assert lookup('A very long entry') is None

    # Same round-trip for the short entry.
    db.insert(short_item)
    assert lookup('A short one') == short_item
    db.remove(where('name') == 'A short one')
    assert lookup('A short one') is None
def remove(name):
    """Delete the record named *name* and its backing file.

    Returns True when a matching record existed and was removed,
    False when nothing matched.
    """
    db = TinyDB(path_db_)
    record = Query()
    matches = db.search(record.name == name)
    if matches:
        # Remove the on-disk file first, then the db record.
        file_path = get_file(name)
        os.remove(file_path)
        db.remove(record.name == name)
        return True
    return False
def test_delete(tmpdir):
    """Insert a nested document, then verify remove() deletes it."""
    db_file = str(tmpdir.join('db.json'))
    db = TinyDB(db_file, ensure_ascii=False)
    query = Query()

    doc = {'network': {'id': '114', 'name': 'ok', 'rpc': 'dac', 'ticker': 'mkay'}}
    db.insert(doc)

    # The nested-field query must find exactly the inserted document.
    assert db.search(query.network.id == '114') == [doc]

    db.remove(query.network.id == '114')
    assert db.search(query.network.id == '114') == []
class Test_004_Delete_existing_data_by_valid_query_Function(unittest.TestCase):
    """Deleting an existing record by a valid query must leave no matches."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Clear the shared db file so later cases start clean.
        self.db.purge()
        self.db.all()

    def test_simple_delete_valid_exist(self):
        print("case 4 delete existing data by valid query")
        record = {'Name': 'Greg', 'Email': '*****@*****.**', 'int': 1, 'char': 1}
        self.db.insert(record)
        self.db.remove(where('Name') == 'Greg')
        self.assertEqual(self.db.search(where('Name') == 'Greg'), [])
def write_prediction_cache(
        region_id,
        type_id,
        prediction_data,
        cache_path=CACHE_PATH,
        db_filename='prophet.json',
        logger=logging.getLogger('publicAPI')
):
    """update tinydb latest prediction

    Args:
        region_id (int): EVE Online region ID
        type_id (int): EVE Online type ID
        prediction_data (:obj:`pandas.DataFrame`): data to write to cache
        cache_path (str, optional): path to caches
        db_filename (str, optional): name of tinydb
        logger (:obj:`logging.Logger`, optional): logging handle

    Returns:
        None

    """
    logger.info('--caching result')
    utc_today = datetime.utcnow().strftime('%Y-%m-%d')
    prediction_db = TinyDB(path.join(cache_path, db_filename))
    try:
        ## clear previous cache ##
        prediction_db.remove(
            (Query().cache_date <= utc_today) &
            (Query().region_id == region_id) &
            (Query().type_id == type_id)
        )
        ## Prepare new entry for cache ##
        cleaned_data = prediction_data.to_json(
            date_format='iso',
            orient='records'
        )
        data = {
            'cache_date': utc_today,
            'region_id': region_id,
            'type_id': type_id,
            'lastWrite': datetime.utcnow().timestamp(),
            'prediction': cleaned_data
        }
        logger.debug(data)
        prediction_db.insert(data)
    finally:
        # BUGFIX: close the handle even when remove()/insert() raises, so the
        # cache file is flushed and not left open on error.
        prediction_db.close()
class ListCache(object):
    """TinyDB-backed cache of Listing objects, keyed by listing hash."""

    DB_FILE = 'listing_db.json'
    DB_TTL = timedelta(hours=12)  # entries older than this are evicted

    def __init__(self):
        self.db = TinyDB(os.path.join(
            os.path.join(os.getcwd(), os.path.dirname(__file__)),
            ListCache.DB_FILE))

    @property
    def db(self):
        return self._db

    @db.setter
    def db(self, db):
        self._db = db

    def listing_in_cache(self, listing):
        """Return True if a listing with the same hash is cached."""
        lquery = Query()
        return self.db.contains(lquery.hsh == listing.hsh)

    def retrieve_listing(self, listing):
        """Rebuild a Listing from its cached dict (keyed by hash)."""
        lquery = Query()
        list_dict = self.db.get(lquery.hsh == listing.hsh)
        return Listing.from_dict(list_dict)

    def insert_listing(self, listing):
        """Insert a listing; falls back to update when already cached."""
        if self.listing_in_cache(listing):
            self.update_listing(listing)
        else:
            list_dict = listing.as_dict()
            # Timestamp the entry so remove_old_listings() can expire it.
            list_dict['last_updated'] = datetime.now().isoformat()
            list_dict['hsh'] = listing.hsh
            self.db.insert(list_dict)

    def remove_listing(self, listing):
        lquery = Query()
        self.db.remove(lquery.hsh == listing.hsh)

    def update_listing(self, listing):
        """Replace the cached entry (remove + re-insert refreshes the timestamp)."""
        if self.listing_in_cache(listing):
            self.remove_listing(listing)
        self.insert_listing(listing)

    def remove_old_listings(self):
        """Evict every cached listing older than DB_TTL."""
        cutoff = datetime.now() - ListCache.DB_TTL
        for listing in self.db.all():
            raw = listing['last_updated']
            try:
                stamp = datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S.%f')
            except ValueError:
                # BUGFIX: datetime.isoformat() omits '.%f' entirely when
                # microsecond == 0, which made the single-format strptime
                # raise ValueError and abort the sweep.
                stamp = datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S')
            if stamp < cutoff:
                self.remove_listing(Listing.from_dict(listing))
class NumberStore():
    """TinyDB-backed store of phone numbers with per-number info."""

    def __init__(self, filename):
        self.db = TinyDB(filename)

    def initNumber(self, number):
        # Create the record the first time this number is seen.
        if not self.getNumberDict(number):
            self.db.insert({'number': number, 'accesses': [], 'info': '#yolo'})

    def touchNumber(self, number):
        # Currently only ensures the record exists.
        self.initNumber(number)

    def getNumberDict(self, number):
        """Return the stored document for *number*, or None."""
        return self.db.get(where('number') == number)

    def getNumberList(self):
        """Yield every stored number (generator)."""
        return (entry['number'] for entry in self.db.all())

    def getAccesses(self, number):
        # Access tracking is not implemented yet; always empty.
        return []

    def getInfo(self, number):
        return self.getNumberDict(number)['info']

    def setInfo(self, number, info):
        self.initNumber(number)
        self.db.update({'info': info}, where('number') == number)
        print(self.db.all())

    def deleteNumber(self, number):
        self.db.remove(where('number') == number)
        print(self.db.all())

    def close(self):
        self.db.close()
def remove(name: str) -> bool:
    """
    Remove corpus

    :param string name: corpus name
    :return: True when the corpus existed and was removed, False otherwise
    """
    db = TinyDB(corpus_db_path())
    temp = Query()
    try:
        data = db.search(temp.name == name)
        if data:
            # Delete the corpus file first, then its db record.
            path = get_corpus_path(name)
            os.remove(path)
            db.remove(temp.name == name)
            return True
        return False
    finally:
        # BUGFIX: the handle was never closed; close it on every exit path
        # so the db file is flushed and released.
        db.close()
class FolderManager:
    """Tracks the folders a user has added, persisted in a TinyDB file."""

    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, FOLDER_DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def add_folder(self, file_name):
        # Duplicates are silently ignored.
        if not self.folder_exists(file_name):
            self._db.insert({'file_name': file_name})

    def get_all_entries(self):
        return self._db.all()

    def folder_exists(self, file_name):
        """ checks if a folder has been added """
        hits = self._db.search(where('file_name') == file_name)
        return bool(hits)

    def get_file_names(self):
        """ returns all the file names of folders that the user has added """
        return [entry['file_name'] for entry in self._db.all()]

    def get_folder_by_name(self, expected_name):
        """ get documents by the specified property """
        return self._db.get(where('file_name') == expected_name)

    def clear_all(self):
        self._db.purge()
class BotHelper:
    """Telegram bot helper: trigger -> response pairs stored in TinyDB."""

    def __init__(self, bot):
        self.db = TinyDB('db.json')
        self.bot = bot

    def executeAssignment(self, chat_id, assignment):
        """Handle an 'add' / 'del' / 'list' command dict."""
        if assignment['command'] == 'add':
            entries = Query()
            if self.db.search(entries.trigger == assignment['trigger']):
                self.bot.sendMessage(chat_id=chat_id, text='No duplicates allowed for now')
                return False
            # Media responses are downloaded locally and the response
            # field is rewritten to the local path.
            if assignment['response_type'] in ('image', 'gif'):
                filename = assignment['response'].split('/')[-1]
                extention = filename.split('.')[-1]
                local_path = 'media/images/' + assignment['trigger'] + '.' + extention
                urllib.request.urlretrieve(assignment['response'], local_path)
                assignment['response'] = local_path
            self.db.insert({
                'trigger': assignment['trigger'],
                'response_type': assignment['response_type'],
                'response': assignment['response'],
            })
        if assignment['command'] == 'del':
            self.db.remove(where('trigger') == assignment['trigger'])
        if assignment['command'] == 'list':
            for entry in self.db.all():
                self.bot.sendMessage(chat_id=chat_id, text=entry)

    def executeTrigger(self, chat_id, message):
        """Answer any word of *message* that matches a stored trigger."""
        entries = Query()
        for word in message.split(' '):
            hits = self.db.search(entries.trigger == word)
            if not hits:
                continue
            hit = hits[0]
            if hit['response_type'] == 'text':
                self.bot.sendMessage(chat_id=chat_id, text=hit['response'])
            if hit['response_type'] == 'image':
                img = open(hit['response'], 'rb')
                self.bot.sendPhoto(chat_id=chat_id, photo=img)
            if hit['response_type'] == 'gif':
                img = open(hit['response'], 'rb')
                self.bot.sendDocument(chat_id=chat_id, document=img)
class TinyDBConvertor(object):
    """Mongo-like collection facade over one TinyDB file.

    Documents belonging to this collection are tagged via a 'type' field;
    every operation re-opens the file to pick up external changes.
    """

    def __init__(self, collection_name, db_name, db_path=LOCAL_DIR):
        self.local_db_fullpath = os.path.join(db_path, db_name)
        self.local_db = TinyDB(self.local_db_fullpath)
        self.collection_name = collection_name

    def reload(self):
        # Close and re-open to refresh the in-memory view of the file.
        self.local_db.close()
        self.local_db = TinyDB(self.local_db_fullpath)

    def find(self):
        """Return every document in this collection."""
        self.reload()
        q = Query()
        results = self.local_db.search(q.type == self.collection_name)
        self.local_db.close()
        return results

    def find_one(self, entry):
        """Return the document whose 'fqu' matches entry['fqu'], or None."""
        self.reload()
        q = Query()
        results = self.local_db.search(
            (q.type == self.collection_name) & (q.fqu == entry['fqu']))
        self.local_db.close()
        return results[0] if results else None

    def save(self, new_entry):
        """Tag *new_entry* with the collection and insert it."""
        self.reload()
        new_entry['type'] = self.collection_name
        inserted = self.local_db.insert(new_entry)
        self.local_db.close()
        return inserted

    def remove(self, entry):
        """Delete the document whose 'fqu' matches entry['fqu']."""
        self.reload()
        q = Query()
        removed = self.local_db.remove(
            (q.type == self.collection_name) & (q.fqu == entry['fqu']))
        self.local_db.close()
        return removed
class DataStore(object):
    """File-path registry persisted in META/__meta__.json via TinyDB."""

    def __init__(self, store_path):
        self.store_path = os.path.join(store_path, "META")
        try:
            os.makedirs(self.store_path)
        except OSError as exc:
            # An already-existing META directory is fine; re-raise anything else.
            if exc.errno != errno.EEXIST:
                raise
        self.db = TinyDB(os.path.join(self.store_path, "__meta__.json"))

    def add_file(self, file_path):
        """Register *file_path* (stored in internal form, deduplicated)."""
        file_path = self._get_internal_path(file_path)
        self._add_to_db(file_path)

    def remove_file(self, file_path):
        """Drop *file_path* from the registry."""
        file_path = self._get_internal_path(file_path)
        self._remove_from_db(file_path)

    def list_all(self):
        """Return every registered path."""
        return self._list_all_db()

    def _init_file_list(self):
        # BUGFIX: self.file_list was never initialized, and the loop appended
        # the file handle `f` on every iteration instead of each `line`.
        self.file_list = []
        with open(self.store_path, "r") as f:
            for line in tqdm(f):
                self.file_list.append(line)

    def _init_db(self):
        self.db = TinyDB(self.store_path)

    def _add_to_db(self, file_path):
        # contains() keeps the registry free of duplicates.
        if not self.db.contains(where('file_path') == file_path):
            self.db.insert({'file_path': file_path})

    def _remove_from_db(self, file_path):
        self.db.remove(where('file_path') == file_path)

    def _list_all_db(self):
        return [rec['file_path'] for rec in self.db.all()]

    def _get_internal_path(self, path):
        return get_internal_path(path)
def write_to_cache_file(
        data,
        cache_path,
        type_id=0,
        region_id=0,
        logger=logging.getLogger(PROGNAME)
):
    """save data to tinyDB

    Args:
        data (:obj:`pandas.DataFrame`): data to write out
        cache_path (str): path to cache file
        type_id (int, optional): EVE Online type_id
        region_id (int, optional): EVE Online region_id
        logger (:obj:`logging.logger`): logging handle

    Returns:
        None

    """
    ## get DB ##
    logger.info('Writing data to cache')
    tdb = TinyDB(cache_path)
    try:
        date_min = data['date'].min()
        logger.debug(date_min)

        ## clean out existing entries ##
        # Only possible when both ids are known (0 means "unspecified").
        if type_id and region_id:
            logger.info('--Removing old cache entries')
            tdb.remove(
                (Query().region_id == region_id) &
                (Query().type_id == type_id) &
                (Query().date >= date_min)
            )

        # Round-trip through JSON to get plain dict records for TinyDB.
        caching_data_str = data.to_json(
            date_format='iso',
            orient='records'
        )
        cache_data = json.loads(caching_data_str)

        logger.info('--Writing to cache file')
        tdb.insert_multiple(cache_data)
    finally:
        # BUGFIX: the TinyDB handle was never closed; close on all paths so
        # the cache file is flushed (consistent with write_prediction_cache()).
        tdb.close()
class Test_007_Delete_Not_existing_data_by_valid_query_Function(unittest.TestCase):
    """Removing a record that was never inserted should match nothing."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Clear the shared db file so later cases start clean.
        self.db.purge()
        self.db.all()

    def test_simple_delete_not_exist(self):
        print("case 7 Delete Non-existing data by valid query")
        outcome = self.db.remove(where('Name') == 'Wendy')
        self.assertEqual(outcome, None)
def getDocByUrl(url):
    """Fetch *url* through a TinyDB-backed response cache (Python 2 code).

    Cached bodies younger than CACHE_MIN minutes are returned directly;
    otherwise the URL is fetched, stored base64-encoded, and returned.
    Returns "" when the request cannot be made.
    """
    cached = False
    db = TinyDB('cache.json')
    # Epoch seconds CACHE_MIN minutes ago; anything stored before this is stale.
    ago = int(((datetime.datetime.now() - datetime.timedelta(minutes=CACHE_MIN)) - datetime.datetime(1970,1,1)).total_seconds()) #.timestamp())
    # Evict expired entries before looking up.
    db.remove(where('date') < ago)
    search = db.search((where('url') == url) & (where('date') > ago))
    if len(search) > 0:
        cached = True
        print "URL: {0} was found in cache!\n".format(url)
    if cached:
        # Bodies are stored base64-encoded; decode before returning.
        return base64.b64decode(search[0]['content'])
    else:
        req = makeUrlRequest(url)
        if req != None:
            content = req.read()
            # Current time as epoch seconds (manual epoch diff; py2 has no .timestamp()).
            date = int((datetime.datetime.now() - datetime.datetime(1970,1,1)).total_seconds()) #.timestamp())
            db.insert({'url':url,'date':date, 'content':base64.b64encode(content)})
            return content
        else:
            print "ERROR: REQUEST COULD NOT BE MADE!\n"
            return ""
def test_json_readwrite():
    """Regression test for issue #1."""
    # NOTE: `path` comes from the enclosing module scope.
    db = TinyDB(path, policy='autoadd', storage=SFTPStorage)

    long_item = {'name': 'A very long entry'}
    short_item = {'name': 'A short one'}

    def lookup(name):
        return db.get(where('name') == name)

    # Round-trip the long entry.
    db.insert(long_item)
    assert lookup('A very long entry') == long_item
    db.remove(where('name') == 'A very long entry')
    assert lookup('A very long entry') is None

    # Round-trip the short entry.
    db.insert(short_item)
    assert lookup('A short one') == short_item
    db.remove(where('name') == 'A short one')
    assert lookup('A short one') is None
class TinyDBStore(object):
    """Persistence layer: per-user event drafts and saved events in two TinyDB files."""

    def __init__(self):
        self.drafts_db = TinyDB('event_drafts.json')
        self.events_db = TinyDB('events.json')

    # Drafts
    def contains_draft(self, user_id):
        # True when the user already has a draft in progress.
        return self.drafts_db.contains(Query().user_id == user_id)

    def new_draft(self, user_id):
        # Start a fresh draft, discarding any existing one for this user.
        if self.contains_draft(user_id):
            self.drafts_db.remove(Query().user_id == user_id)
        self.drafts_db.insert({
            'user_id': user_id,
            'current_field': 0,
            'event': {}
        })

    def update_draft(self, user_id, event, current_field):
        # Overwrite the user's draft with the new field cursor and payload.
        self.drafts_db.update({
            'user_id': user_id,
            'current_field': current_field,
            'event': event
        }, Query().user_id == user_id)

    def get_draft(self, user_id):
        return self.drafts_db.get(Query().user_id == user_id)

    def remove_draft(self, user_id):
        self.drafts_db.remove(Query().user_id == user_id)

    # Events
    def insert_event(self, event):
        # The TinyDB document id doubles as the event's public 'id'.
        event_id = self.events_db.insert(event)
        event['id'] = event_id
        return event

    def update_event(self, event):
        # NOTE(review): this uses attribute access `event.eid` while
        # remove_event uses `event['id']` — presumably `event` is a tinydb
        # Document here (which exposes .eid); a plain dict would raise
        # AttributeError. Confirm against callers.
        self.events_db.update(event, eids=[event.eid])

    def remove_event(self, event):
        self.events_db.remove(eids=[event['id']])

    def get_events(self, user_id, name=None):
        # Optional substring filter on the event name.
        if name:
            return self.events_db.search((Query().user_id == user_id) & (Query().name.test(lambda v: name in v)))
        return self.events_db.search(Query().user_id == user_id)

    def get_event(self, event_id):
        return self.events_db.get(eid=int(event_id))
def DeleteOrder(orderID):
    """Remove the trade with the given ID from activetrades.json.

    Args:
        orderID: order identifier; IDs are stored as strings in the db.
    """
    db = TinyDB("activetrades.json")
    order = Query()
    db.remove(order.ID == str(orderID))
    # BUGFIX: the trailing `db.all()` was dead code (its result was
    # discarded); close the handle instead so the file is flushed.
    db.close()
class NoteTaker():
    """Class to provide the taking note ability. """

    def __init__(self):
        """Initialization method of :class:`dragonfire.takenote.NoteTaker` class. """
        home = expanduser("~")  # Get the home directory of the user
        self.db = TinyDB(home + '/.dragonfire_db.json')  # This is where we store the database; /home/USERNAME/.dragonfire_db.json
        # When True, notes go through the SQLAlchemy session instead of TinyDB.
        self.is_server = False
        self.db_session = None

    def check_setnote(self, com, doc, h, user_answering, userin, user_prefix):
        """Method to Dragonfire's check set commands for note taking ability.

        Dialog state machine: the first branch group recognises a *new*
        note/list/reminder command; the `user_answering['status']` block
        consumes follow-up answers; the final branch group handles deletes.

        Args:
            com (str): User's command.
            doc: doc of com from __init__.py
            h: doc helper from __init__.py
            user_answering: User answering state dictionary.
            userin: :class:`dragonfire.utilities.TextToAction` instance.
            user_prefix: user's preferred titles.
        """
        if h.check_verb_lemma("create") and h.check_lemma("to") and h.check_verb_lemma("do") and h.check_noun_lemma("list"):
            # "create a to do list ..." — strip the command words, keep the payload.
            takenote_query = ""
            for token in doc:
                if not (token.lemma_ == "create" or token.lemma_ == "to" or token.lemma_ == "do" or token.lemma_ == "list" or token.is_stop):
                    takenote_query += ' ' + token.text
            takenote_query = takenote_query.strip()
            user_answering['status'] = True
            user_answering['for'] = 'note_taking'
            user_answering['reason'] = 'todo_list'
            if not takenote_query:  # when command come without note.
                return userin.say(choice([
                    "Okay, " + user_prefix + ". What is the name?",
                    "I'm listening for give a name to list, " + user_prefix + ".",
                    "Alright, " + user_prefix + ". Please, say a list name.",
                    "Ready. What is the name of list?",
                    "Say a name for list."
                ]))
            else:  # when command came with note.
                # NOTE(review): assumes user_answering already contains a
                # 'todo_list' dict — confirm the caller initializes it.
                user_answering['todo_list']['name'] = ""
                user_answering['todo_list']['count'] = 1
                user_answering['takenote_query'] = takenote_query
                return userin.say(choice([
                    "1. item receipt. Give a name to the list, " + user_prefix + "."
                ]))
        elif h.check_verb_lemma("remind") and h.check_text("me"):
            # "remind me ..." — start the reminder dialog.
            takenote_query = ""
            for token in doc:
                if not (token.lemma_ == "remind" or token.lemma_ == "me" or token.is_stop):
                    takenote_query += ' ' + token.text
            takenote_query = takenote_query.strip()
            user_answering['status'] = True
            user_answering['for'] = 'note_taking'
            user_answering['reason'] = 'remind'
            if not takenote_query:  # when command came without note.
                return userin.say(choice([
                    "Understood. what is note?",
                    "Yes! I'm listening the note.",
                    "Alright, " + user_prefix + ". What will I remind?",
                    "Ready to record, " + user_prefix + ". what is the note?",
                    "Okay, " + user_prefix + ". Please enter the note."
                ]))
            else:  # when command came with note.
                user_answering['takenote_query'] = takenote_query
                return userin.say(choice([
                    "It's Okay, " + user_prefix + ". When will I remind?",
                    "Alright. When do you want to remember?",
                    "Alright, " + user_prefix + ". What is the remind time?",
                    "Note taken. Give the remind time.",
                    "I get it, " + user_prefix + ". Please enter the remind time."
                ]))
        elif h.check_verb_lemma("take") and h.check_noun_lemma("note"):
            # "take note" with no payload — wait for the note text.
            user_answering['status'] = True
            user_answering['for'] = 'note_taking'
            user_answering['reason'] = 'basic'
            return userin.say(choice([
                "Yes, " + user_prefix + ".",
                "Yes. I'm listening",
                "Alright, " + user_prefix + ".",
                "Ready to record, " + user_prefix + ".",
                "Keep going, " + user_prefix + "."
            ]))
        elif h.check_verb_lemma("note") and h.check_lemma("that"):
            # "note that ..." — a one-shot note.
            takenote_query = ""
            for token in doc:
                if not (token.lemma_ == "note" or token.lemma_ == "that" or token.is_stop):
                    takenote_query += ' ' + token.text
            takenote_query = takenote_query.strip()
            if not takenote_query:  # when the input does not contain a note
                user_answering['status'] = True
                user_answering['for'] = 'note_taking'
                user_answering['reason'] = 'basic'
                return userin.say(choice([
                    "Yes, " + user_prefix + ".",
                    "Yes. I'm listening",
                    "Alright, " + user_prefix + ".",
                    "Ready to record, " + user_prefix + ".",
                    "Keep going, " + user_prefix + "."
                ]))
            else:
                # Note supplied inline: store it and end the dialog.
                self.db_upsert(takenote_query)
                user_answering['status'] = False
                return userin.say(choice(["The note taken", "The note was recorded", "I get it"]) + choice([".", ", " + user_prefix + "."]))

        if user_answering['status']:
            # A dialog is in progress; `com` is the user's follow-up answer.
            if com.startswith("whatever") or com.startswith("give up") or com.startswith("not now") or com.startswith("forget it") or com.startswith("WHATEVER") or com.startswith("GIVE UP") or com.startswith("NOT NOW") or com.startswith("FORGET IT"):  # for writing interrupt while taking notes and creating reminders.
                user_answering['status'] = False
                user_answering.pop('todo_list', None)
                user_answering.pop('takenote_query', None)
                return userin.say(choice(["As you wish", "I understand", "Alright", "Ready whenever you want", "Get it"]) + choice([".", ", " + user_prefix + "."]))

            if user_answering['reason'] == 'todo_list':
                if not user_answering['todo_list']['name']:
                    # First answer is the list name.
                    user_answering['todo_list']['name'] = com
                    if not user_answering['takenote_query']:  # keeper compare for the elastic usage.
                        return userin.say("I get it. Enter the 1. item...")
                    else:
                        # A first item was given with the original command.
                        self.db_upsert(user_answering['takenote_query'], None, None, user_answering['todo_list']['name'], user_answering['todo_list']['count'], True)
                        return userin.say("I get it. Enter the " + str(user_answering['todo_list']['count'] + 1) + ". item...")
                else:
                    if com.startswith("enough") or com.startswith("it is okay") or com.startswith("it is ok") or com.startswith("it's okay") or com.startswith("it's ok") or com.startswith("end") or com.startswith("ENOUGH") or com.startswith("IT IS OKAY") or com.startswith("IT IS OK") or com.startswith("IT'S OKAY") or com.startswith("IT'S OK") or com.startswith("END"):
                        # User finished the list: close the dialog.
                        temporary_keeper = user_answering['todo_list']['name']
                        user_answering['status'] = False
                        user_answering.pop('todo_list', None)
                        user_answering.pop('takenote_query', None)
                        return userin.say(choice([
                            "List was recorded",
                            temporary_keeper + " ToDo List generated",
                            "Get it. List ready"
                        ]) + choice([".", ", " + user_prefix + "."]))
                    # Otherwise `com` is the next list item.
                    user_answering['todo_list']['count'] += 1
                    self.db_upsert(com, None, None, user_answering['todo_list']['name'], user_answering['todo_list']['count'], True)
                    return userin.say(choice([
                        "It is Okay. Give " + str(user_answering['todo_list']['count'] + 1) + ". item",
                        "Get it. Give other item",
                        "Okay. Enter other one",
                        "Okay, you can say other",
                        "Get it. Listening for other"
                    ]) + choice([".", ", " + user_prefix + "."]))

            if user_answering['reason'] == 'remind':
                if user_answering['remind_again']:  # for using same reminder on different time.
                    user_answering['remind_again'] = False
                    if com.startswith("yes") and com.endswith("yes") or com.startswith("yep") and com.endswith("yep") or com.startswith("okay") and com.endswith("okay") or h.check_deps_contains("do it"):
                        return userin.say(choice([
                            "It's okay",
                            "Get it",
                            "reminder will repeat",
                            " It has been set again"
                        ]) + choice([", " + user_prefix + ". ", ". "]) + choice([
                            "What is the remind time?",
                            "When do you want to remind?",
                            "Give remind time.",
                            "Say the time"
                        ]))
                    else:
                        user_answering['status'] = False
                        user_answering.pop('takenote_query', None)
                        return userin.say(choice([
                            "As you wish",
                            "I understand",
                            "Alright",
                            "Ready whenever you want",
                            "Get it"
                        ]) + choice([". ", ", " + user_prefix + ". "]))
                if not user_answering['takenote_query']:
                    # First answer is the reminder text; next one is the time.
                    user_answering['takenote_query'] = com
                    return userin.say(choice(["It's okay", "Get it", "note was recorded", "The note taken"]) + choice([", " + user_prefix + ". ", ". "]) + choice([
                        "What is the remind time?",
                        "When do you want to remind?",
                        "Give the remind time.",
                        "Say the time"
                    ]))
                else:  # flexible usage is going to be set.
                    # Parse "after N minutes/hours/days" style times.
                    # NOTE(review): `com.startswith("")` is always True, so this
                    # outer condition never filters anything — confirm intent.
                    if com.startswith("after") or com.endswith("later") or com.startswith("in") or com.startswith(""):
                        if h.check_noun_lemma("minute") or h.check_noun_lemma("minutes"):
                            takenote_query = ""
                            for token in doc:
                                if not (token.lemma_ == "after" or token.lemma_ == "later" or token.lemma_ == "minute" or token.lemma_ == "minutes" or token.is_stop):
                                    takenote_query += ' ' + token.text
                            mnt = float(takenote_query)
                            if isinstance(mnt, float):
                                # timestamp is a kind of second.
                                # `time` here is minutes since the epoch (local name shadows nothing imported in this block).
                                time = datetime.datetime.now().timestamp() + mnt * 60
                                time = int(time / 60)
                                self.db_upsert(user_answering['takenote_query'], None, time, None, None, False, True, True)
                                # return userin.say(str(time.strftime("%H:%M")))
                            else:
                                return userin.say("Repeat!")
                        elif h.check_noun_lemma("hour") or h.check_noun_lemma("hours"):
                            takenote_query = ""
                            for token in doc:
                                if not (token.lemma_ == "after" or token.lemma_ == "later" or token.lemma_ == "hour" or token.lemma_ == "hours" or token.is_stop):
                                    takenote_query += ' ' + token.text
                            hr = int(takenote_query)
                            if isinstance(hr, float):
                                # timestamp is a kind of second.
                                time = datetime.datetime.now().timestamp() + hr * 60 * 60
                                time = int(time / 60)
                                self.db_upsert(user_answering['takenote_query'], None, time, None, None, False, True, True)
                                # return userin.say(str(time))
                            else:
                                return userin.say("Repeat!")
                        elif h.check_noun_lemma("day") or h.check_noun_lemma("days"):
                            takenote_query = ""
                            for token in doc:
                                if not (token.lemma_ == "after" or token.lemma_ == "later" or token.lemma_ == "day" or token.lemma_ == "days" or token.is_stop):
                                    takenote_query += ' ' + token.text
                            dy = int(takenote_query)
                            if isinstance(dy, float):
                                # timestamp is a kind of second.
                                time = datetime.datetime.now().timestamp() + dy * 24 * 60 * 60
                                time = int(time / 60)
                                self.db_upsert(user_answering['takenote_query'], None, time, None, None, False, True, True)
                                # return userin.say(str(time))
                            else:
                                return userin.say("Repeat!")
                    user_answering['status'] = False
                    user_answering.pop('takenote_query', None)
                    if not user_answering['is_reminder_active']:
                        # if reminder checker loop not run, start the loop.
                        thread.start_new_thread(reminder.remind, (self, userin, user_prefix, user_answering))
                    return userin.say(choice(["It's okay", "Get it", "note was recorded", "The note taken"]) + choice([", " + user_prefix + ". ", ". "]) + choice([
                        "Reminder Added.",
                        "I'm waiting to remind.",
                        "I will remind.",
                        "Reminder has been set."
                    ]))
            else:  # taking note second compare here.
                # 'basic' note dialog: `com` is the note body itself.
                user_answering['status'] = False
                self.db_upsert(com)
                return userin.say(choice([
                    "The note Taken",
                    "Alright",
                    "I noted",
                    "Ready whenever you want to get it",
                    "Get it"
                ]) + choice([".", ", " + user_prefix + ". "]))

        if h.check_lemma("delete") or h.check_verb_lemma("remove"):
            # Bulk-delete commands ("delete all notes/lists/reminders/database").
            if h.check_lemma("all"):
                if h.check_lemma("over") and h.check_noun_lemma("database"):
                    self.db_delete(None, None, True)
                    return userin.say("notes database cleared")
                if h.check_lemma("note") or h.check_lemma("notes"):
                    self.db_delete()
                    return userin.say("All notes Deleted")
                if (h.check_verb_lemma("do") and h.check_noun_lemma("lists")) or (h.check_verb_lemma("do") and h.check_noun_lemma("list")):
                    self.db_delete(None, None, False, None, None, True)
                    return userin.say("All to do lists deleted")
                if h.check_lemma("reminder") or h.check_lemma("reminders"):
                    self.db_delete(None, None, False, None, None, False, True)
                    return userin.say("All reminders deleted")
        return None

    def check_getnote(self, com, doc, h, user_answering, userin, user_prefix):
        """Method to Dragonfire's check get commands for note taking ability.

        Args:
            com (str): User's command.
            doc: doc of com from __init__.py
            h: doc helper from __init__.py
            user_answering: User answering state dictionary.
            userin: :class:`dragonfire.utilities.TextToAction` instance.
            user_prefix: user's preferred titles.
        """
        if h.check_verb_lemma("say") or h.check_verb_lemma("get") or h.check_verb_lemma("give"):
            if h.check_noun_lemma("note") or h.check_noun_lemma("notes"):
                # "say my notes" — dump all plain notes.
                return userin.say(self.db_get(None, None))
            if h.check_verb_lemma("do") or (h.check_verb_lemma("do") and h.check_noun_lemma("list")):
                # "get my to do list <name?>" — strip command words to find a name.
                takenote_query = ""
                for token in doc:
                    if not (token.lemma_ == "say" or token.lemma_ == "get" or token.lemma_ == "give" or token.lemma_ == "do" or token.lemma_ == "list" or token.lemma_ == "dragonfire" or token.is_stop):
                        takenote_query += ' ' + token.text
                takenote_query = takenote_query.strip()
                if not takenote_query:  # when command come without note.
                    result = self.db_get(None, None, True)
                    if not result:
                        return userin.say("There is no list")
                    return userin.say(choice([
                        "which list",
                        "Alright, say the list name",
                        "Okay, What is the name of list",
                        "List name"
                    ]) + choice(["?", ", " + user_prefix + "?"]))
                else:  # when command came with note.
                    result = self.db_get(None, com, True)
                    if not result:
                        return userin.say(choice([
                            "This name is not exist",
                            "I couldn't find it, say again",
                            "Not found, Repeat",
                            "Not exist, speak again"
                        ]) + choice(["?", ", " + user_prefix + "?"]))
                    else:
                        return userin.say(result)
        if com.startswith("whatever") or com.startswith("give up") or com.startswith("not now") or com.startswith("forget it") or com.startswith("WHATEVER") or com.startswith("GIVE UP") or com.startswith("NOT NOW") or com.startswith("FORGET IT"):  # for writing interrupt while taking notes and creating reminders.
            return userin.say(
                choice(["As you wish", "I understand", "Alright", "Ready whenever you want", "Get it"]) + choice(
                    [". ", ", " + user_prefix + ". "]))
        if (h.check_lemma("give") or h.check_lemma("say") or h.check_lemma("get")) or h.check_verb_lemma("remind"):
            if h.check_noun_lemma("names") or h.check_noun_lemma("them") or not h.check_noun_lemma(""):
                # Asked for the list names themselves.
                result = self.db_get(None, None, True)
                return userin.say("list of the lists:\n" + result)
            # Otherwise treat the whole command as a list name lookup.
            result = self.db_get(None, com, True)
            if not result:
                return userin.say(choice([
                    "This name is not exist",
                    "I couldn't find it, say again",
                    "Not found, Repeat",
                    "Not exist, speak again"
                ]) + choice(["?", ", " + user_prefix + "?"]))
            else:
                return userin.say(result)
        return None

    def db_get(self, note, list_name, is_todolist=False, is_reminder=False, is_public=True, user_id=None):
        """Function to get a note record from the database. NOT COMPLETED.

        Args:
            note (str): note that pulled from the user's input/command.
            list_name (str): to-do list name to fetch (None lists all list names).

        Keyword Args:
            is_todolist (bool): Is it a to do list? (default: False)
            is_reminder (bool): Is it a note for remind? (default: False)
            is_public (bool): Is it a public record? (non-user specific)
            user_id (int): User's ID.

        Returns:
            str: Response (or a result list when is_reminder is True).
        """
        if self.is_server:
            try:
                notepad = self.db_session.query(NotePad).filter(NotePad.note == note, NotePad.is_reminder == is_reminder, NotePad.user_id == user_id, NotePad.is_public == is_public).order_by(NotePad.counter.desc()).first()
                answer = notepad.note
                return self.mirror(answer)
            except NoResultFound:
                return None
        else:
            if is_reminder:
                # Reminders are returned raw (caller formats them).
                result = self.db.search((Query().is_reminder == is_reminder))
                return result
            if is_todolist:
                if not list_name:  # if user don't remember the list name.
                    result = self.db.search((Query().is_todolist == is_todolist))
                    if not result:
                        return None
                    # Collect each distinct list name once.
                    name_keeper = []
                    for row in result:
                        if row['list_name'] in name_keeper:
                            pass
                        else:
                            name_keeper.append(row['list_name'])
                    response = ""
                    for row in name_keeper:
                        response += row + ",\n"
                    return response
                result = self.db.search((Query().is_todolist == is_todolist) & (Query().list_name == list_name))
                if not result:
                    return None  # for the recursive compare
                answer = ""
                for row in result:
                    answer += "item " + str(row['list_sequence']) + ": " + row['note'] + ". \n"
                return answer
            # Plain notes: everything that is neither a list item nor a reminder.
            result = self.db.search((Query().is_todolist == is_todolist) & (Query().is_reminder == is_reminder))
            if not result:
                return "There is no note"
            counter = 0
            answer = ""
            for row in result:
                counter += 1
                answer += "note " + str(counter) + ": " + row['note'] + ". \n"
            return answer

    def db_upsert(self, note, category=None, remind_time_stamp=None, list_name=None, list_sequence=None, is_todolist=False, is_reminder=False, is_active=False, is_public=True, user_id=None):
        """Function to insert(or update) a note record to the database.

        Args:
            note (str): note that extracted from the user's input/command.

        Keyword Args:
            category (str): optional note category.
            remind_time_stamp (int): reminder time in minutes since the epoch.
            list_name (str): to-do list name the note belongs to.
            list_sequence (int): position of the note inside its list.
            is_todolist (bool): is it a to do list item? (default: False)
            is_reminder (bool): is it a note for remind? (default: False)
            is_active (bool): is the reminder still pending? (default: False)
            is_public (bool): is it a public note? (non-user specific)
            user_id (int): User's ID.

        Returns:
            str: Response (always "" in local TinyDB mode).
        """
        if self.is_server:
            notepad = self.db_session.query(NotePad).filter(NotePad.note == note, NotePad.is_todolist == is_todolist, NotePad.list_name == list_name, NotePad.list_sequence == list_sequence, NotePad.is_reminder == is_reminder, NotePad.user_id == user_id, NotePad.is_public == is_public, NotePad.category == category, NotePad.remind_time_stamp == remind_time_stamp, NotePad.is_active == is_active).one_or_none()
            if not notepad:
                new_notepad = NotePad(note=note, is_todolist=is_todolist, list_name=list_name, list_sequence=list_sequence, is_reminder=is_reminder, user_id=user_id, is_public=is_public, category=category, remind_time_stamp=remind_time_stamp, is_active=is_active)
                self.db_session.add(new_notepad)
                self.db_session.commit()
            else:
                # Same note seen again: bump its usage counter.
                notepad.counter += 1
                self.db_session.commit()
        else:
            if (not is_reminder and not is_todolist) or (is_todolist and not is_reminder):
                # Plain notes and list items: insert only when not present yet.
                if not self.db.search((Query().note == note)):  # if there is no exacty record on the database then
                    self.db.insert({
                        'note': note,
                        'category': category,
                        'is_reminder': is_reminder,
                        'list_name': list_name,
                        'is_todolist': is_todolist,
                        'remind_time_stamp': remind_time_stamp,
                        'list_sequence': list_sequence
                    })  # insert the given data
            elif is_reminder and not is_todolist:
                # Reminders: drop any duplicates first, then (re)insert.
                if not self.db.search((Query().note == note)):  # if there is no exact record on the database then
                    pass
                else:
                    while self.db.search((Query().note == note)):
                        self.db.remove((Query().note == note))
                self.db.insert({
                    'note': note,
                    'category': category,
                    'is_reminder': is_reminder,
                    'list_name': list_name,
                    'is_todolist': is_todolist,
                    'remind_time_stamp': remind_time_stamp,
                    'list_sequence': list_sequence,
                    'is_active': is_active
                })  # insert the given data
            else:
                pass  # the note is to do list and reminder both at the same time. This compare will using on future.
        return ""

    def db_delete(self, note=None, category=None, are_all=False, list_name=None, list_sequence=None, is_todolist=False, is_reminder=False, is_active=False, is_public=True, user_id=None):
        """Function to delete a note record from the database. NOT COMPLETED.

        Args:
            note (str): note that extracted from the user's input/command.

        Keyword Args:
            are_all (bool): wipe both lists and reminders in one go.
            is_todolist (bool): delete to do list items? (default: False)
            is_reminder (bool): delete reminders? (default: False)
            is_public (bool): Is it a public record? (non-user specific)
            user_id (int): User's ID.

        Returns:
            str: Response ("" on success, a message otherwise).
        """
        if self.is_server:
            if not is_public and user_id:
                notepad = self.db_session.query(NotePad).filter(NotePad.note == note, NotePad.user_id == user_id).one_or_none()
                if not notepad:
                    return "I don't remember anything about " + self.mirror(note)
                else:
                    notepad.delete()
                    self.db_session.commit()
                    return "OK, I forgot everything I know about " + self.mirror(note)
            else:
                return "I cannot forget a general note about " + self.mirror(note)
        else:
            if are_all:
                self.db.remove((Query().is_todolist == is_todolist) | (Query().is_reminder == is_reminder))  # If "to do list for remind" is added in the future, this line will be reworked.
                return ""
            if self.db.remove((Query().is_todolist == is_todolist) & (Query().is_reminder == is_reminder)):
                return ""
            else:
                return "There is no note."
'name': 'lee', 'email': '*****@*****.**' }, { 'name': 'park', 'email': '*****@*****.**' }]) #JsonArray 삽입 SQL = Query() #데이터 수정 el = db.get(SQL.name == 'kim') #id값 출력 print(el) print(el.doc_id) db.update({'email': '*****@*****.**'}, doc_ids=[3]) db.update({'email': '*****@*****.**'}, doc_ids=[1, 2, 3]) #데이터 수정 & 추가 db.upsert({'email': '*****@*****.**', 'login': True}, SQL.name == 'kim') #데이터 삭제 db.remove(doc_ids=[2, 3]) db.remove(SQL.name == 'park') #전체 조회 print(db.all()) #접속 종료 db.close()
class Database:
    """TinyDB-backed store of Monopoly board squares and landing statistics."""

    def __init__(self):
        # Reuse db.json when present; otherwise seed it from propertiesData.json.
        if os.path.exists('db.json'):
            print('DB exists')
            self.db = TinyDB('db.json')
        else:
            with open('propertiesData.json') as jd:
                data = json.load(jd)
                self.db = TinyDB('db.json')
                for item in data:
                    self.db.insert(item)

    def incrementLanded(self, landed):
        """Bump the 'Landed' counter of every record named *landed*."""
        query = Query()
        docs = self.db.search(query.Name == landed)
        for doc in docs:
            doc['Landed'] += 1
        self.db.write_back(docs)

    def purgeDocuments(self):
        """Empty the database and delete its backing file."""
        self.db.purge()
        os.remove('db.json')

    def createGraph(self):
        """Plot landing frequency (as %), cost and rent for every property."""
        self.removeDataPoints()  # Removes All non-properties
        names, landed, costs, rents = [], [], [], []
        for doc in self.db.all():
            names.append(doc['Name'])
            landed.append(doc['Landed'])
            costs.append(doc['Cost'])
            rents.append(doc['Rent'])
        landed = self.convertToPercent(landed)
        chart = Graph(names, landed, costs, rents)
        # chart.barChart()
        chart.lineChart()

    def convertToPercent(self, values):
        """Return *values* rescaled to percentages of their sum, rounded to 2 dp."""
        total = sum(values)
        return [round((num / total) * 100, 2) for num in values]

    def removeDataPoints(self):
        """Drop every board square that is not a purchasable property."""
        place = Query()
        for square in ('Chance', 'GO', 'Community Chest', 'Jail',
                       'Free Parking', 'GO TO Jail', 'Income Tax', 'Luxury Tax'):
            self.db.remove(place.Name == square)
class DBHandler: """Class that make the link between the DB and repator.""" @staticmethod def auditors(): """Default constructor for Auditors database.""" return DBHandler(DB_LOCAL_FILES["auditors"], DB_AUDITORS_DEFAULT) @staticmethod def clients(): """Default constructor for Clients database.""" return DBHandler(DB_LOCAL_FILES["clients"], DB_CLIENTS_DEFAULT) @staticmethod def vulns(): """Default constructor for Vulns database.""" return DBHandler(DB_LOCAL_FILES["vulns"], DB_VULNS_DEFAULT) @staticmethod def vulns_git(): """Default constructor for Vulns taken from git database.""" return DBHandler(DB_GIT_LOCAL_FILES["vulns"], DB_VULNS_DEFAULT) @staticmethod def auditors_git(): """Default constructor for Vulns taken from git database.""" return DBHandler(DB_GIT_LOCAL_FILES["auditors"], DB_AUDITORS_DEFAULT) @staticmethod def clients_git(): """Default constructor for Vulns taken from git database.""" return DBHandler(DB_GIT_LOCAL_FILES["clients"], DB_CLIENTS_DEFAULT) def __init__(self, db_path, default_values=None): if not path.exists(path.dirname(db_path)): mkdir(path.dirname(db_path), 0o750) new_db = not path.isfile(db_path) self.path = db_path self.default_values = default_values if default_values else {} self.database = TinyDB(db_path, indent=2, object_pairs_hook=collections.OrderedDict) if new_db: self.insert_record(default_values) else: for name, value in default_values.items(): self.insert_column(name, value) def insert_column(self, name, value): """Creates a new column in the database.""" values = self.get_all() cols = {name: value} ids = [] for record in values: if name in record: return False # column already exists ids.append(record.doc_id) self.database.update(cols, doc_ids=ids) return True def insert_record(self, dictionary=None): """Adds a new record to the database.""" if dictionary is None: dictionary = collections.OrderedDict(self.search_by_id(1)) first_lang = True if self.path == DB_LOCAL_FILES["vulns"]: # Adds the keys which are different according 
to the languages for lang in LANGUAGES: if first_lang: first_lang = False else: for elem in DB_VULNS_DIFFERENT_LANG: dictionary[elem + lang] = dictionary[elem] return self.database.insert(dictionary) def insert_multiple(self, dictionary): """Insertion of multiple entries.""" return self.database.insert_multiple(dictionary) def get_all(self): """Gets all records but the first one which is a sample record.""" return self.database.all()[1:] def search(self, name, value): """Implements the search method of TinyDB.""" query = Query() return self.database.search(query[name] == value) def search_by_id(self, id_): """Searches for a document with the id id_ in the database.""" return self.database.get(doc_id=id_) def update(self, id_, name, value): """Modifies the corresponding record in the database.""" record = self.search_by_id(id_) if record is None: return False record[name] = value return self.database.update(record, doc_ids=[id_]) def delete(self, id_): """Removes the corresponding record from the database.""" return self.database.remove(doc_ids=[id_]) def purge(self): """Purges the database and adds the default values to the newly created database.""" self.database.purge() self.insert_record(self.default_values) def close(self): """Implements the close method of TinyDB.""" self.database.close()
class Labels(commands.Cog):
    """Discord cog mapping short labels to URLs, persisted in TinyDB."""

    def __init__(self, bot):
        self.bot = bot
        self.database = TinyDB(config.database_labels)
        self.query = Query()

    # BUG FIX: the decorator keyword was misspelled `ame="set"`, which raises
    # TypeError when the command is registered; corrected to `name="set"`.
    @commands.command(name="set", description="sets a new entry in the database")
    async def set(self, ctx, label, *, value):
        """Create a new label unless it already exists."""
        try:
            label = label.lower()
            if self.database.contains(self.query.iid == label):
                response = (
                    'Label already exists, you can !update the label with a new value'
                )
                await sender(ctx, response)
            else:
                self.database.insert({'iid': label, 'url': value})
                response = (
                    'Label set, you can !update the value or !remove the label'
                )
                await sender(ctx, response)
                logger.info('{0} set the label {1} to {2}'.format(
                    ctx.message.author.name, label, value))
            return
        except Exception as e:
            # NOTE(review): the user/content parts of these log lines were
            # redacted in the source; reconstructed as str() of the fields.
            logger.error("!set command failed with error: " + str(e) +
                         ", User: " + str(ctx.message.author) +
                         ", Content: " + str(ctx.message.content))

    @commands.command(
        name="get",
        description="Gets an entry from the database: aliases: show, get",
        aliases=["show"])
    async def get(self, ctx, label):
        """Reply with the URL stored under *label*."""
        try:
            label = label.lower()
            if not self.database.contains(self.query.iid == label):
                response = (
                    'Label not found, you can !set the label with a value')
                await sender(ctx, response)
            else:
                response = self.database.get(self.query.iid == label)['url']
                await sender(ctx, response)
                logger.info('{0} called the label {1}'.format(
                    ctx.message.author.name, label))
            return
        except Exception as e:
            logger.error("!get command failed with error: " + str(e) +
                         ", User: " + str(ctx.message.author) +
                         ", Content: " + str(ctx.message.content))

    @commands.command(name="update", description="Updates an entry in the database")
    async def update(self, ctx, label, *, value):
        """Replace the URL stored under an existing *label*."""
        try:
            label = label.lower()
            if not self.database.contains(self.query.iid == label):
                # BUG FIX: the original also called database.contains() here
                # with update()-style arguments; that call has no effect and
                # was removed.
                response = ('Label not found')
                await sender(ctx, response)
                return
            self.database.update({'url': value}, self.query.iid == label)
            response = (
                'Updated label, you can !set the label with a value or !remove the label'
            )
            await sender(ctx, response)
            logger.info('{0} updated the label {1} to {2}'.format(
                ctx.message.author.name, label, value))
            return
        except Exception as e:
            logger.error("!update command failed with error: " + str(e) +
                         ", User: " + str(ctx.message.author) +
                         ", Content: " + str(ctx.message.content))

    @commands.command(
        name="remove",
        description="Removes an entry from the database: aliases: remove, delete",
        aliases=["delete"])
    async def remove(self, ctx, label):
        """Delete *label* from the database if it exists."""
        try:
            label = label.lower()
            if not self.database.contains(self.query.iid == label):
                response = (
                    'Label not found, you can !set the label with a value')
                await sender(ctx, response)
            else:
                # BUG FIX: the original rebound `label` to remove()'s return
                # value (a list of doc ids), so the log line printed ids
                # instead of the label name.
                self.database.remove(self.query.iid == label)
                response = ('Label removed')
                await sender(ctx, response)
                logger.info('{0} removed the label {1}'.format(
                    ctx.message.author.name, label))
            return
        except Exception as e:
            logger.error("!remove command failed with error: " + str(e) +
                         ", User: " + str(ctx.message.author) +
                         ", Content: " + str(ctx.message.content))

    @commands.command(name="labels",
                      description="Shows all set labels in the database")
    async def labels(self, ctx):
        """Show every stored label in a single embed."""
        try:
            embed = discord.Embed(title="All labels in the database")
            # BUG FIX: the original regex-parsed str(document) to recover the
            # 'iid' field; read the field directly instead (same output,
            # no brittle string parsing).
            label_list = sorted(item['iid'] for item in self.database.all())
            embed.description = "{0}".format(' - '.join(label_list))
            await ctx.send(embed=embed)
        except Exception as e:
            logger.error("!labels command failed with error: " + str(e) +
                         ", User: " + str(ctx.message.author))
class Config():
    """Operate on the ``config.json`` TinyDB file (subscriptions, groups,
    uid lists and the stored version)."""

    def __init__(self, event=None):
        self._init(event)

    def __enter__(self, event=None):
        # Context-manager entry: (re)open the database.
        self._init(event)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.config.close()

    def _init(self, event=None):
        # Open the TinyDB file and bind the named tables.
        self.config = TinyDB(get_path('config.json'), encoding='utf-8')
        self.uids = self.config.table('uids')
        self.groups = self.config.table('groups')
        self.uid_lists = self.config.table('uid_lists')
        self.version = self.config.table('version')
        if event:
            # Derive the sending bot, chat type and target id from the event.
            self.bot_id = event.self_id
            self.type = event.detail_type
            self.type_id = str(event.group_id) if event.group_id else str(event.user_id)

    def uid_exist(self, uid, type_id=False):
        # With type_id=True the match is scoped to the current chat;
        # otherwise any record for this uid counts.
        q = Query()
        if type_id:
            r = self.config.get((q.uid == uid) & (q.type == self.type) & (q.type_id == self.type_id))
        else:
            r = self.config.get((q.uid == uid))
        return r

    def add_admin(self):
        # Group chats default to admin-only control; private chats are skipped.
        if self.type == 'private':
            return
        q = Query()
        if not self.groups.contains(q.group_id == self.type_id):
            self.groups.insert({'group_id': self.type_id, 'admin': True})

    def update_uid_lists(self):
        # Rebuild the deduplicated crawl lists for dynamics and live streams.
        if 'uid_lists' not in self.config.tables():
            self.uid_lists.insert_multiple([
                {
                    'dynamic': [],
                    'index': 0
                },
                {
                    'live': [],
                    'index': 0
                }
            ])
        q = Query()
        r = self.config.search(q.dynamic == True)
        dynamic = list(set([c['uid'] for c in r]))
        self.uid_lists.update({'dynamic': dynamic}, q.dynamic.exists())
        r = self.config.search(q.live == True)
        live = list(set([c['uid'] for c in r]))
        self.uid_lists.update({'live': live}, q.live.exists())

    async def add_uid(self, uid):
        """Add a streamer subscription for the current chat."""
        r = self.uid_exist(uid, True)
        if r:
            # Already subscribed in this chat.
            return f"请勿重复添加 {r['name']}({r['uid']})"
        self.add_admin()
        r = self.uid_exist(uid)
        if r:
            # This chat has not subscribed, but another chat already added
            # this uid, so the display name is known.
            name = r['name']
        else:
            # No chat has subscribed to this uid yet.
            api = BiliAPI()
            try:
                # Check that the uid is valid (logic needs rework).
                user_info = await api.get_info(uid)
                name = user_info["name"]
            except:
                return "请输入有效的uid"
        self.config.insert({
            'uid': uid,
            'name': name,
            'type': self.type,
            'type_id': self.type_id,
            'live': True,
            'dynamic': True,
            'at': False,
            'bot_id': self.bot_id
        })
        self.update_uid_lists()
        return f"已添加 {name}({uid})"

    async def delete_uid(self, uid):
        """Delete a streamer subscription for the current chat."""
        r = self.uid_exist(uid, True)
        if not r:
            return "删除失败,uid 不存在"
        q = Query()
        self.config.remove(
            (q.uid == uid) & (q.type == self.type) & (q.type_id == self.type_id))
        self.update_uid_lists()
        return f"已删除 {r['name']}({uid})"

    async def uid_list(self):
        """List the current chat's subscriptions."""
        q = Query()
        r = self.config.search((q.type == self.type) & (q.type_id == self.type_id))
        message = "以下为当前的订阅列表:\n\n"
        for c in r:
            message += (
                f"【{c['name']}】" +
                f"直播推送:{'开' if c['live'] else '关'}," +
                f"动态推送:{'开' if c['dynamic'] else '关'}" +
                f"({c['uid']})\n"
            )
        return message

    async def set(self, func, uid, status):
        """Toggle one feature flag (live/dynamic/at) for a subscription.

        NOTE(review): the literal "name" inside these reply strings looks
        like a templating placeholder left in the source — confirm.
        """
        if func == 'at' and self.type == 'private':
            return "只有群里才能name"
        r = self.uid_exist(uid, True)
        if not r:
            return "name失败,uid 不存在"
        if r[func] == status:
            return "请勿重复name"
        q = Query()
        self.config.update({func: status},
                           (q.uid == uid) & (q.type == self.type) & (q.type_id == self.type_id))
        self.update_uid_lists()
        return f"已name,{r['name']}({r['uid']})"

    async def set_permission(self, status):
        """Set whether only admins may control the bot in this group."""
        if self.type == 'private':
            return "只有群里才能name"
        q = Query()
        r = self.groups.get(q.group_id == self.type_id)
        if (not r and status) or (r and r['admin'] == status):
            return "请勿重复name"
        if not self.groups.contains(q.group_id == self.type_id):
            self.groups.insert({'group_id': self.type_id, 'admin': status})
        else:
            self.groups.update({'admin': status}, q.group_id == self.type_id)
        return "已name,只有管理员才能使用" if status else "已name,所有人都能使用"

    def next_uid(self, func):
        """Return the next uid to crawl for *func* ('dynamic' or 'live'),
        advancing a round-robin index stored in the uid_lists table."""
        q = Query()
        r = self.uid_lists.get(q[func].exists())
        if not r:
            # Nothing was ever added; uid_list has not been created yet.
            return None
        index = r['index']
        uid_list = r[func]
        if not uid_list:
            # uid_list is empty.
            return None
        if index >= len(uid_list):
            # Wrap around to the start of the list.
            uid = uid_list[0]
            index = 1
        else:
            uid = uid_list[index]
            index += 1
        self.uid_lists.update({'index': index}, q[func].exists())
        return uid

    def get_push_list(self, uid, func):
        """Return all chats that enabled *func* pushes for *uid*."""
        q = Query()
        return self.config.search((q.uid == uid) & (q[func] == True))

    def get_admin(self, group_id):
        # Unknown groups default to admin-only (True).
        q = Query()
        if not self.groups.contains(q.group_id == group_id):
            return True
        return self.groups.get(q.group_id == group_id)['admin']

    @classmethod
    def get_name(cls, uid):
        """Return the display name recorded for *uid*."""
        q = Query()
        return (cls().config.get(q.uid == str(uid)))['name']

    def read(self):
        """Read the raw registration info from the legacy JSON file."""
        with open(get_path('config.json'), encoding='utf-8-sig') as f:
            text = f.read()
        self.json = json.loads(text)
        return self.json

    def backup(self):
        """Back up the current config file."""
        # FIXME: skip the backup if config.json does not exist.
        self.read()
        backup_name = f"config.{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.json.bak"
        with open(get_path(backup_name), 'w', encoding='utf-8') as f:
            f.write(json.dumps(self.json, ensure_ascii=False, indent=4))
        return True

    def new_version(self):
        # Returns True when the running code is newer than the stored version.
        if 'version' not in self.config.tables():
            self.version.insert({'version': __version__})
            return True
        current_version = Version(__version__)
        old_version = Version(self.version.all()[0]['version'])
        return current_version > old_version

    def update_version(self):
        self.version.update({'version': __version__})

    @classmethod
    async def update_config(cls):
        """Migrate the legacy JSON layout to the TinyDB layout."""
        with Config() as config:
            if 'status' in config.config.tables():
                # Legacy layout detected: back up, wipe, and re-import.
                config.backup()
                config.config.drop_tables()
                for c_type, config_type in {'group': 'groups', 'private': 'users'}.items():
                    config.type = c_type
                    for type_id, type_config in config.json[config_type].items():
                        config.type_id = type_id
                        uids = type_config['uid']
                        for uid, sets in uids.items():
                            config.bot_id = config.json['uid'][uid][config_type][config.type_id]
                            await config.add_uid(uid)
                            for func, status in sets.items():
                                await config.set(func, uid, status)
                        if 'admin' in type_config and not type_config['admin']:
                            await config.set_permission(False)
            if config.new_version():
                config.backup()
                config.update_version()
class GNGraphConfigModel:
    """Persists the single GNGraph settings record (doc_id 1) in TinyDB."""

    query = Query()

    def __init__(self, db_path):
        dbpath = os.path.join(db_path, 'gngraph_config_settings.json')
        if os.path.exists(dbpath):
            self._db = TinyDB(dbpath)
        else:
            # First run: create the file and seed it with defaults.
            def_dict = {'sfmode': 1, 'dbmode': 0}
            self._db = TinyDB(dbpath)
            self.insert_op(def_dict)

    def req_fields_json(self, dict_result):
        """Project *dict_result* down to the recognised settings keys."""
        req_items = ['sfmode', 'dbmode']
        return {
            key: value
            for key, value in dict_result.items() if key in req_items
        }

    def search_op(self, req_dict):
        # NOTE: the argument is ignored; this only reports whether the
        # singleton record (doc_id 1) exists.
        return self.search_res(1) is not None

    def search_res(self, id):
        """Return the document with the given doc_id, or None."""
        return self._db.get(doc_id=id)

    def insert_op(self, req_dict):
        """Insert the settings record, or update it if one already exists."""
        if not self.search_op(1):
            self._db.insert(req_dict)
            return self._db.all()
        self.update_op(req_dict)
        return "None_Insert"

    def upsert_op(self, req_dict):
        """Write the settings as the singleton document (doc_id 1)."""
        self._db.upsert(Document(req_dict, doc_id=1))

    def delete_op(self, req_dict):
        # NOTE(review): this queries 'serverIP', a field this settings schema
        # never stores — looks copy-pasted from the DB-credentials model;
        # confirm before relying on it.
        if self.search_op(req_dict):
            self._db.remove(where('serverIP') == req_dict['serverIP'])
            return self._db.all()
        return "None_Delete"

    def get_op(self):
        """Return the singleton settings document."""
        return self._db.get(doc_id=1)

    def update_op(self, req_dict):
        """Update the singleton record in place.

        BUG FIX: the original defined update_op twice — the first copy
        referenced an undefined name (bare ``doc_id == 1``) and the second
        filtered on a non-existent 'id' field, so nothing was ever updated.
        Target doc_id 1 directly instead.
        """
        if not self.search_res(1):
            return False
        self._db.update(
            {
                'sfmode': req_dict['sfmode'],
                'dbmode': req_dict['dbmode']
            }, doc_ids=[1])
        return self._db.all()

    def stop_db(self):
        self._db.close()
<input type="hidden" name="status" value="{request['status']}"> <input type="hidden" name="reason" value="{request['reason']}"> <input type="hidden" name="need_rsicc" value="{request['need_rsicc']}"> <input type="hidden" name="rsicc_n" value="{request['rsicc_n']}"> <input type="hidden" name="sw_scale" value="{request['sw_scale']}"> <input type="hidden" name="sw_mcnp" value="{request['sw_mcnp']}"> <input type="hidden" name="sw_serpent" value="{request['sw_serpent']}"> <input type="hidden" name="sw_other" value="{request['sw_other']}"> <input type="hidden" name="confirmed" value="1"> <input type="submit" value="All is OK, CONFIRM!"> </form></fieldset>''') if validated and confirmed == '1': # Remove previous entries from the database if db.search(User.netid == request['netid']): db.remove(User.netid == request['netid']) # Insert new entry request['time'] = my_time_now db.insert(request) print(f'''<h2> Thank you {request['first_name']}!</h2>''') mail_body = f'''To: {email_admin} From: NEcluster Accounts <*****@*****.**> Subject: NE Cluster account request: {request['netid']} netid {request['netid']} first_name {request['first_name']} last_name {request['last_name']} status {request['status']} reason {request['reason']} need_rsicc {request['need_rsicc']}
class Model(object):
    """Base model persisting Field attributes to a per-table TinyDB file.

    NOTE: ``_deleted_args`` and ``_exclude_fields`` are class-level lists;
    ``__init__`` extends ``_exclude_fields`` in place, so additions are
    shared by every instance of the class.
    """

    table = 'default'
    _exclude_fields = [
        'db', 'table', 'submit', '_exclude_fields', 'exclude_fields',
        '_deleted_args'
    ]
    _deleted_args = list()

    def __init__(self, **kwargs):
        table = os.path.join(current_app.config.get('DB_PATH', 'gallery_db'),
                             '%s.json' % self.table)
        self.db = TinyDB(table, storage=S3Storage)
        self.eid = Field(type=int, required=False, primary=False)
        exclude_fields = getattr(self, 'exclude_fields', None)
        if exclude_fields:
            self._exclude_fields += exclude_fields
        for key, value in kwargs.items():
            if key == '_deleted_args':
                self._deleted_args = value
            if key not in self._exclude_fields:
                self.setattr(key, value)

    def all(self):
        """Return every row as a model object."""
        rows = list()
        for row in self.db.all():
            rows.append(self.as_obj(row))
        return rows

    def filter(self, **kwargs):
        """Return the de-duplicated rows matching every given field.

        BUG FIX: used Python-2-only ``dict.iteritems()`` while the rest of
        the class uses ``items()``; this crashed on Python 3.
        """
        rows = list()
        eids = list()
        for field, value in kwargs.items():
            if type(value) != Field:
                value = self.setattr(field, value)
            if value.validate():
                founds = self.db.search(where(field) == value.value)
                for found in founds if founds else []:
                    if found.eid not in eids:
                        eids.append(found.eid)
                        rows.append(self.as_obj(found))
        return rows

    def get(self, eid):
        """Return the row with id *eid*, or False when missing."""
        row = self.db.get(eid=eid)
        if row:
            return self.as_obj(row)
        return False

    def search(self, **kwargs):
        """Return the first row matching any given field, or False.

        BUG FIX: same Python-2 ``iteritems()`` fix as ``filter``.
        """
        for field, value in kwargs.items():
            if type(value) != Field:
                value = self.setattr(field, value)
            if value.validate():
                row = self.db.search(where(field) == value.value)
                if row:
                    if type(row) == list:
                        row = row[0]
                    return self.as_obj(row)
        return False

    def create(self):
        """Insert this object and return the new row id."""
        insert = self.as_dict()
        return self.db.insert(insert)

    def update(self):
        """Write this object's fields back to its existing row."""
        update = self.as_dict()
        for arg in self._deleted_args:
            # Best-effort removal of deleted fields; ignore rows that never
            # had them.
            try:
                self.db.update(delete(arg), eids=[self.eid.value])
            except Exception:
                pass
        return self.db.update(update, eids=[self.eid.value])

    def save(self):
        """Create or update depending on whether an eid is set."""
        if self.eid.value:
            self.eid.validate()
            return self.update()
        else:
            create = self.create()
            self.eid.value = create
            return self

    def delete(self):
        """Remove this object's row from the database."""
        self.db.remove(eids=[self.eid.value])

    def as_dict(self):
        """Return the validated field values as a plain dict."""
        args = dict()
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                attr = getattr(self, key, None)
                if attr:
                    if attr.validate():
                        args[key] = attr.value
        return args

    def clean(self):
        """Drop every non-excluded attribute from the instance."""
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                delattr(self, key)

    def as_obj(self, row):
        """Hydrate this instance from a DB row and return a shallow copy."""
        self.clean()
        if not getattr(self, 'eid', None):
            self.eid = Field(value=row.eid, type=int, required=False,
                             primary=False)
        for key, value in row.items():
            self.setattr(key, value)
        return copy.copy(self)

    def setattr(self, key, value):
        """Wrap *value* in a Field and attach it under *key*."""
        attr = getattr(self, key, Field())
        if type(attr) != Field:
            attr = Field()
        attr.value = value
        if key not in self._exclude_fields:
            setattr(self, key, attr)
            return attr
        if key == '_deleted_args':
            self._deleted_args.append(value)
        return False

    def from_form(self, form):
        """Populate fields from a form mapping."""
        for key, value in form.items():
            self.setattr(key, value)
        return self

    def as_form(self):
        """Return the non-excluded Field attributes keyed by name."""
        fields = dict()
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                attr = getattr(self, key, None)
                if attr and type(attr) == Field:
                    fields[key] = attr
        return fields

    def __repr__(self):
        if self.eid:
            return '<%s: %s>' % (self.__class__.__name__, self.eid.value)
        else:
            return '<%s>' % (self.__class__.__name__)
def output(x):
    """Print *x* preceded by a blank separator line."""
    print("", x, sep="\n")


db = TinyDB("summaries.json")

# cleanup: start from an empty table
db.purge()

# insert sample documents
db.insert({"type": "apple", "count": 7})
db.insert({"type": "peach", "count": 3})

# query
output(db.all())
Fruit = Query()
output(db.search(Fruit.type == "peach"))
output(db.search(Fruit.count >= 3))

db.update({"count": 10}, Fruit.type == "apple")
output(db.search(Fruit.type == "apple"))

db.remove(Fruit.count < 5)
print(db.all())
class Reminder(commands.Cog):
    """ Set a Reminder for text in given amount of time """

    def __init__(self, bot, loop=asyncio):
        self.bot = bot
        # TinyDB file holding pending reminders.
        self.reminders = TinyDB('data/reminders.db')
        # Seconds per unit, keyed by the unit's first letter.
        self.units = {"m": 60, "h": 3600, "d": 86400, "w": 604800}
        self.times = ['minute', 'hour', 'day', 'week']
        # NOTE(review): the `loop` parameter defaults to the asyncio module
        # and is never used; the running loop is fetched instead — confirm.
        self.loop = asyncio.get_event_loop()
        # Background task that delivers due reminders.
        self.bg_task = self.loop.create_task(self.do_reminder())

    def add_db(self, data):
        # TODO: Fix this...
        try:
            self.reminders.insert(data)
            return True
        except Exception as e:
            return False

    def rm_db(self, rec):
        # TODO: Encapsulate db code in to rm_db()
        pass

    def view_db(self, user_id):
        # TODO: Encapsulate viewing of db records in to view_db()
        pass

    @commands.command(pass_context=True,
                      aliases=['reminders', 'set_reminder', 'add_reminder'])
    async def reminder(self, ctx, quantity: int, time_unit: str, *, text: str):
        """Set a reminder - Usage: !reminder <int> <minutes/hours/days/weeks> <reminder message"""
        # First letter selects the unit's seconds multiplier.
        time_prefix = time_unit.lower()[:1]
        author = ctx.message.author
        s = ''
        # Normalise plural units ("hours" -> "hour") and remember the 's'
        # for the confirmation message.
        if time_unit.endswith("s"):
            time_unit = time_unit[:-1]
            s = "s"
        if time_unit not in self.times:
            # NOTE(review): this message offers "month" but no month unit
            # exists in self.units/self.times — confirm intended wording.
            await ctx.send(
                "Invalid time unit. Choose minutes/hours/days/weeks/month")
            return
        if not quantity or quantity < 1:
            await ctx.send("Quantity must not be 0 or negative.")
            return
        if len(text) > 1960:
            await ctx.send("Text is too long.")
            return
        seconds = self.units[time_prefix] * quantity
        # Absolute UNIX timestamp at which the reminder fires.
        future = int(time.time() + seconds)
        data = {
            "id": author.id,
            "remind_at": future,
            "time": str(quantity) + ' ' + time_unit,
            "message": text
        }
        # insert() returns the new doc_id (>= 1), so this is truthy on success.
        if self.reminders.insert(data):
            logger.info(f"{author.name} ({author.id}) set a reminder.")
            await ctx.send(
                f"I will remind you of that in {str(quantity)} {time_unit}{s}."
            )
        else:
            await ctx.send("Something went wrong.")

    @reminder.error
    async def reminder_error(self, ctx, error):
        # Reports missing/invalid arguments for the reminder command.
        if isinstance(error, commands.MissingRequiredArgument):
            await ctx.send("You must supply the correct arguments")
        logger.error(f'An error occurred: {error} - {ctx.message.author}')

    @commands.command(pass_context=True)
    async def clear_reminders(self, ctx):
        """Removes all your upcoming reminders"""
        author = ctx.message.author
        removable = []
        # Iterating a TinyDB instance yields its stored documents.
        for reminder in self.reminders:
            if reminder["id"] == author.id:
                removable.append(reminder)
        if not removable == []:
            for item in removable:
                self.reminders.remove(doc_ids=[item.doc_id])
            await ctx.send("Your reminders have been cleared")
            logger.info(f"{author.name} ({author.id}) cleared all reminders.")
        else:
            await ctx.send("You don't have any reminders.")

    @commands.command(pass_context=True)
    async def view_reminders(self, ctx):
        """View all your upcoming reminders"""
        author = ctx.message.author
        id_list = []
        time_list = []
        msg_list = []
        if len(self.reminders) > 0:
            for reminder in self.reminders:
                if reminder['id'] == author.id:
                    id_list.append(str(reminder.doc_id))
                    time_list.append(reminder['time'])
                    msg_list.append(reminder['message'])
                else:
                    return
            embed = discord.Embed(colour=discord.Colour.dark_grey(),
                                  title="Reminders")
            # The str(list) output is reshaped into newline-separated column
            # text for each embed field.
            embed.add_field(name='#',
                            value=str(id_list).replace(',', '\n').replace(
                                '[', '').replace(']', '').replace('\'', ''),
                            inline=True)
            embed.add_field(name='Time',
                            value=str(time_list).replace(',', '\n').replace(
                                '[', '').replace(']', '').replace('\'', '') + 's',
                            inline=True)
            embed.add_field(name='Reminder',
                            value=str(msg_list).replace(',', '\n').replace(
                                '[', '').replace(']', '').replace('\'', ''),
                            inline=True)
            await ctx.send(embed=embed)
        else:
            await ctx.send("Reminder database is empty")

    async def do_reminder(self):
        # Background loop: every 5 seconds, DM any due reminders and delete
        # them from the database.
        await self.bot.wait_until_ready()
        while not self.bot.is_closed():
            to_remove = []
            if len(self.reminders) > 0:
                for reminder in self.reminders:
                    if reminder["remind_at"] <= int(time.time()):
                        user = await self.bot.fetch_user(int(reminder['id']))
                        dm_channel = user.dm_channel
                        if not dm_channel:
                            dm_channel = await user.create_dm()
                        print(dm_channel)
                        await dm_channel.send(
                            f"You asked me to remind you with this message:\n{reminder['message']}"
                        )
                        to_remove.append(reminder.doc_id)
                        logger.info(
                            f"{user.name} ({user.id}) reminder sent at {time.time()}."
                        )
                self.reminders.remove(doc_ids=to_remove)
            await asyncio.sleep(5)
class GNGraphDBConfigModel:
    """Persists the single Postgres credential record (doc_id 1) in TinyDB."""

    query = Query()

    def __init__(self, db_path):
        dbpath = os.path.join(db_path, 'gngraph_pgres_dbcreds.json')
        if os.path.exists(dbpath):
            self._db = TinyDB(dbpath)
        else:
            # First run: seed the file with an empty credentials record.
            def_dict = {
                'serverIP': '',
                'serverPort': '',
                'username': '',
                'password': '',
                'dbname': ''
            }
            self._db = TinyDB(dbpath)
            self.insert_op(def_dict)

    def req_fields_json(self, dict_result):
        """Project *dict_result* down to the recognised credential keys."""
        req_items = [
            'serverIP', 'serverPort', 'username', 'password', 'dbname'
        ]
        return {
            key: value
            for key, value in dict_result.items() if key in req_items
        }

    def search_op(self, req_dict):
        # NOTE: the argument is ignored; this only reports whether the
        # singleton record (doc_id 1) exists.
        return self.search_res(1) is not None

    def search_res(self, id):
        """Return the document with the given doc_id, or None."""
        return self._db.get(doc_id=id)

    def insert_op(self, req_dict):
        """Insert the credentials record unless one already exists."""
        if not self.search_op(req_dict):
            self._db.insert(req_dict)
            return self._db.all()
        return "None_Insert"

    def upsert_op(self, req_dict):
        """Write the credentials as the singleton document (doc_id 1)."""
        self._db.upsert(Document(req_dict, doc_id=1))

    def delete_op(self, req_dict):
        """Remove the record matching req_dict's serverIP."""
        if self.search_op(req_dict):
            self._db.remove(where('serverIP') == req_dict['serverIP'])
            return self._db.all()
        return "None_Delete"

    def get_op(self):
        """Return the singleton credentials document."""
        return self._db.get(doc_id=1)

    def update_rec(self, req_dict):
        """Overwrite all credential fields of the singleton record.

        BUG FIX: the original filtered on ``query.id == 1``, but records have
        no 'id' field so nothing matched; address doc_id 1 directly.
        """
        self._db.update(
            {
                'serverIP': req_dict['serverIP'],
                'serverPort': req_dict['serverPort'],
                'username': req_dict['username'],
                'password': req_dict['password'],
                'dbname': req_dict['dbname']
            }, doc_ids=[1])

    def update_op(self, old_srv_IP, req_dict):
        """Update credentials for the record whose serverIP is *old_srv_IP*.

        BUG FIX: the original called ``search_res(old_srv_IP)``, which treats
        the IP string as a TinyDB doc_id and therefore never found the
        record; look it up by the serverIP field instead.
        """
        if not self._db.get(GNGraphDBConfigModel.query.serverIP == old_srv_IP):
            return False
        self._db.update(
            {
                'serverIP': req_dict['serverIP'],
                'serverPort': req_dict['serverPort'],
                'username': req_dict['username'],
                'password': req_dict['password']
            }, GNGraphDBConfigModel.query.serverIP == old_srv_IP)
        return self._db.all()

    def stop_db(self):
        self._db.close()
class Plugin(object):
    """Noku wikia plugin: per-channel wikia page lookup and search."""

    def __init__(self, pm):
        self.pm = pm
        self.modulename = 'wikia'
        # Per-module config DB storing which channels may use this plugin.
        self.configPath = 'pluginsconfig/data_config-{0}_a.json'.format(self.modulename)
        self.configDB = TinyDB(self.configPath)

    @staticmethod
    def register_events():
        # Commands exposed by this plugin and the minimum rank to use them.
        return [
            Events.Command("wikia", Ranks.Default, "[wiki]/[page]/[subsec?] Displays a wikia page. Provide a subcategory for more information."),
            Events.Command("findwiki", Ranks.Default, "[wiki]/[searchterm] search for a wikia page"),
            Events.Command("wikia.allow", Ranks.Admin),
            Events.Command("wikia.block", Ranks.Admin)]

    async def handle_command(self, message_object, command, args):
        try:
            print("--{2}--\n[Noku-macro] {0} command from {1} by {3}".format(command, message_object.channel.name, arrow.now().format('MM-DD HH:mm:ss'), message_object.author.name))
        except:
            # Channel/author names may contain characters the console cannot
            # encode (e.g. emojis).
            print("[Noku]Cannot display data, probably emojis.")
        # Only allowed channels may run the lookup commands.
        if self.configDB.contains(Query().chanallow == message_object.channel.id):
            '''
            Add modules checks here
            '''
            if command == "wikia":
                await self.displaypage(message_object, args[1])
        #Do not modify or add anything below it's for permissions
        # Admin commands work regardless of the channel allow-list.
        if command == "{0}.allow".format(self.modulename):
            await self.allowChan(message_object)
        if command == "{0}.block".format(self.modulename):
            await self.blockChan(message_object)
        '''
        Add modules here
        '''

    async def displaypage(self, message_object, args):
        # args is "wiki/page" or "wiki/page/subsection".
        elements = args.split("/")
        if len(elements) > 1:
            try:
                print ("[a]")
                status = await self.pm.client.send_message(message_object.channel, ':information_source:`Looking up wikia page~`'.format())
                print ("[b]")
                await self.pm.client.send_typing(message_object.channel)
                print ("[c]")
                # Direct page lookup first.
                page = wikia.page(elements[0], elements[1])
                url = page.url
                print ("[d]")
                if len(elements) == 2:
                    print ("[e]")
                    header = '{0} > {1}'.format(elements[0], elements[1])
                    content = page.summary
                    print ("[e.5]")
                else:
                    print ("[f]")
                    header = '{0} > {1} > {2}'.format(elements[0], elements[1], elements[2])
                    content = page.section(elements[2])
                    print ("[f.5]")
            except:
                # Direct lookup failed: fall back to a wikia search and let
                # the user pick a result by number.
                try:
                    print ("[search]")
                    search = wikia.search(elements[0], elements[1])
                    print ("[search.1]")
                    results = ""
                    i = 1
                    print ("[search.2]")
                    for x in search:
                        results = results + "{0}: {1}\n".format(i, x)
                        i += 1
                    print ("[search.3]")
                    await self.pm.client.edit_message(status, ":information_source:**No page found, here's the search results instead**\n```{0}```\n*Select the page you want to view by responding with a number*".format(results))
                    print ("[search.4]")
                    response = await self.pm.client.wait_for_message(author=message_object.author)
                    print ("[search.7]")
                    try:
                        page = wikia.page(elements[0], search[int(response.content) - 1])
                        print ("[search.8]")
                        header = '{0} > {1}'.format(elements[0], search[int(response.content) - 1])
                        print ("[search.9]")
                        content = page.summary
                        print ("[search.10]")
                        url = page.url
                        print ("[search.11]")
                    except:
                        # Reply was not a valid result number.
                        await self.pm.client.edit_message(status, ":exclamation:`Invalid Selection!`".format())
                        return
                except:
                    await self.pm.client.edit_message(status, ":exclamation:`Invalid Wikia or no results found!`".format())
                    return
            print ("[display.1]")
            # Build the comma-separated list of the page's section names.
            tags = ""
            for x in page.sections:
                tags = tags + x + ', '
            print ("[display.1.5]")
            # Keep the embed below Discord's size limits.
            if len(content) > 1000:
                content = content[:1000]+"..."
            print ("[display.2]")
            em = discord.Embed(title='', description="**Summary**\n{0}\n\n**Sub Sections**\n{1}\n\n**Link**\n{2}".format(content, tags, url), colour=0x007AFF, url=url)
            em.set_author(name=header)
            em.set_footer(text="Noku-wikia version 1.0.5", icon_url=self.pm.client.user.avatar_url)
            print ("[display.3]")
            if len(page.images) > 0:
                em.set_thumbnail(url=page.images[0])
            #print(content)
            print ("[display.4]")
            try:
                await self.pm.client.send_message(message_object.channel, embed=em)
            except:
                # Fall back to a plain-text message when the embed fails.
                await self.pm.client.send_message(message_object.channel, "***{3}***\n\n**Summary**\n{0}\n\n**Sub Sections**\n{1}\n\n**Link**\n{2}".format(content, tags, url, header))
            try:
                # `response` only exists when the search fallback ran.
                await self.pm.client.delete_message(status)
                await self.pm.client.delete_message(response)
            except:
                pass
            #except:
            #await self.pm.client.send_message(message_object.channel,":exclamation:`Something went terribly wrong.`")
            #    print ("Sum error happened.")
            #    pass

    #Do not modify or add anything below it's for permissions
    async def allowChan(self, message_object):
        # Add this channel to the allow-list.
        self.configDB.insert({'chanallow' : message_object.channel.id});
        await self.pm.client.send_message(message_object.channel, ':information_source:`Noku Bot-{1} has been allowed access to {0}`'.format(message_object.channel.name, self.modulename))

    async def blockChan(self, message_object):
        # Remove this channel from the allow-list.
        self.configDB.remove(Query().chanallow == message_object.channel.id);
        await self.pm.client.send_message(message_object.channel, ':information_source:`Noku Bot-{1} has been blocked access to {0}`'.format(message_object.channel.name, self.modulename))
class Learn():
    """Learns facts from user speech and answers questions about them.

    Parses commands with a spaCy pipeline, extracts a subject / verbtense /
    clause triple, and stores or retrieves those triples in a per-user
    TinyDB file (~/.dragonfire_db.json). Does not handle TTS.
    """

    def __init__(self, nlp):
        """Build the pronoun/auxiliary mirroring tables and open the database.

        Args:
            nlp: a loaded spaCy language pipeline (e.g. en_core_web_sm).
        """
        self.pronouns = collections.OrderedDict()  # first-person -> second-person pronoun map
        self.pronouns["I"] = "YOU"
        self.pronouns["ME"] = "YOU"
        self.pronouns["MY"] = "YOUR"
        self.pronouns["MINE"] = "YOURS"
        self.pronouns["MYSELF"] = "YOURSELF"
        self.pronouns["OUR"] = "YOUR"
        self.pronouns["OURS"] = "YOURS"
        self.pronouns["OURSELVES"] = "YOURSELVES"
        self.pronouns["WE"] = "YOU"
        self.pronouns["US"] = "YOU"
        self.inv_pronouns = collections.OrderedDict()  # second-person -> first-person pronoun map
        self.inv_pronouns["YOU"] = "I"
        self.inv_pronouns["YOUR"] = "MY"
        self.inv_pronouns["YOURS"] = "MINE"
        self.inv_pronouns["YOURSELF"] = "MYSELF"
        self.inv_pronouns["YOURSELVES"] = "OURSELVES"
        self.auxiliaries = collections.OrderedDict()  # first-person -> second-person auxiliary verbs
        self.auxiliaries["AM"] = "ARE"
        self.auxiliaries["'M"] = " ARE"
        self.auxiliaries["WAS"] = "WERE"
        self.inv_auxiliaries = collections.OrderedDict()  # second-person -> first-person auxiliary verbs
        self.inv_auxiliaries["ARE"] = "AM"
        self.inv_auxiliaries["WERE"] = "WAS"
        home = expanduser("~")  # home directory of the current user
        self.db = TinyDB(home + '/.dragonfire_db.json')  # fact store; /home/USERNAME/.dragonfire_db.json
        self.nlp = nlp  # spaCy pipeline (en_core_web_sm by default)

    # Entry function for this class. Dragonfire calls only this function. It does not handle TTS.
    def respond(self, com):
        """Parse *com* and either answer, store, or forget a fact.

        Returns a response string, or None when no subject could be found.
        """
        doc = self.nlp(com)  # spaCy requires unicode input; self.nlp() handles all parsing
        subject = []  # subject tokens (usually things like I'M, YOU, HE, SHE, IT)
        types = []  # dependency types of the noun phrases seen so far
        types.append("")  # sentinel so types[-2] is safe on the first chunk
        for np in doc.noun_chunks:  # iterate over the noun phrases (chunks)
            types.append(np.root.dep_)
            # 'pobj' directly after 'nsubj' completes possessive subjects like "MY PLACE OF BIRTH"
            if np.root.dep_ == 'pobj' and types[-2] == 'nsubj':
                subject.append(np.root.head.text)  # the preposition itself (e.g. 'OF')
                subject.append(np.text)  # the object of the preposition (e.g. 'BIRTH')
            # nominal subject; "wh-" words also parse as nsubj but are questions, so exclude them
            if np.root.dep_ == 'nsubj' and types[-2] not in ['pobj', 'nsubj'] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np.text)
            # attribute, unless it continues a subject already captured
            if np.root.dep_ == 'attr' and types[-2] not in ['pobj', 'nsubj'] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np.text)
            # direct object, same exclusions as above
            if np.root.dep_ == 'dobj' and types[-2] not in ['pobj', 'nsubj'] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np.text)
        subject = [x for x in subject]
        subject = ' '.join(subject).strip()  # concatenate all noun phrases found
        if subject:  # proceed only when a subject was extracted
            wh_found = False
            for word in doc:  # a "wh-" tag anywhere marks the command as a question
                if word.tag_ in ['WDT', 'WP', 'WP$', 'WRB']:
                    wh_found = True
            if wh_found:  # question: look the subject up
                straight = self.db_getter(subject)  # direct lookup by subject
                if straight is None:
                    return self.db_getter(subject, True)  # nothing found: try inverted lookup (by clause)
                return straight
            else:  # statement: split it into verbtense + clause
                verb_found = False
                verbtense = None  # the am/is/are of the main sentence clause
                clause = []  # the information to be acknowledged
                verbs = []  # every verb seen, used for forget/define keyword detection
                for word in doc:
                    if verb_found:  # everything after the first stop-verb is the clause
                        if word.pos_ != 'PUNCT':  # exclude punctuation
                            clause.append(word.text)
                    if word.pos_ == 'VERB' and word.is_stop and not verb_found:  # first stop-word verb becomes the verbtense
                        verb_found = True
                        verbtense = word.text
                    if word.pos_ == 'VERB':
                        verbs.append(word.text)
                clause = [x for x in clause]
                clause = ' '.join(clause).strip()  # concatenate the clause
                # keyword verbs trigger removal instead of storage
                if any(verb in verbs for verb in self.capitalizer(["forget", "remove", "delete", "update"])):
                    if self.db.remove(Query().subject == self.pronoun_fixer(subject)):  # truthy list of removed ids => something was deleted
                        return "OK, I forgot everything I know about " + self.mirror(subject)
                    else:
                        return "I don't even know anything about " + self.mirror(subject)
                # keyword verbs that request a definition
                if any(verb in verbs for verb in self.capitalizer(["define", "explain", "tell", "describe"])):
                    return self.db_getter(subject)
                if verbtense:
                    return self.db_setter(subject, verbtense, clause, com)  # store the new fact

    # Function to get a record from the database
    def db_getter(self, subject, invert=False):
        """Return a mirrored sentence describing *subject*, or None.

        With invert=True the search key is matched against the stored
        clause instead (used for WHO-style questions).
        """
        if invert:
            result = self.db.search(Query().clause == subject)  # match against stored clauses
        else:
            result = self.db.search(Query().subject == subject)  # match against stored subjects
        if result:
            # group clauses by verbtense: {'is': [...], 'was': [...]}
            dictionary = {}
            for row in result:
                if row['verbtense'] not in dictionary:
                    dictionary[row['verbtense']] = []
                if row['clause'] not in dictionary[row['verbtense']]:
                    dictionary[row['verbtense']].append(row['clause'])
            if invert:
                answer = row['subject']  # in WHO questions the stored subject is the answer
            else:
                answer = subject  # otherwise echo the queried subject
            first_verbtense = False
            for key, value in dictionary.items():
                if not first_verbtense:  # first verbtense joined with a space
                    answer += ' ' + str(key)
                    first_verbtense = True
                else:  # later verbtenses joined with ", "
                    answer += ', ' + str(key)
                first_clause = False
                for clause in value:
                    if not first_clause:  # first clause joined with a space
                        answer += ' ' + clause
                        first_clause = True
                    else:  # later clauses joined with " and "
                        answer += ' and ' + clause
            return self.mirror(answer)  # mirror the answer (for example: I'M to YOU ARE)
        else:
            return None  # no result
        return None

    # Function to set a record to the database
    def db_setter(self, subject, verbtense, clause, com):
        """Insert the (subject, verbtense, clause) triple unless it already exists."""
        if not self.db.search(
                (Query().subject == subject) &
                (Query().verbtense == verbtense) &
                (Query().clause == clause)):  # only insert when no exact record exists
            self.db.insert({
                'subject': subject,
                'verbtense': verbtense,
                'clause': clause
            })
        return "OK, I get it. " + self.mirror(com)  # echo the command mirrored back to the user

    # Function to mirror the answer (for example: I'M to YOU ARE)
    def mirror(self, answer):
        """Swap first/second-person pronouns and auxiliaries in *answer*."""
        result = []
        types = []
        types.append("")  # sentinel so types[-2] is safe on the first token
        doc = self.nlp(answer)
        for token in doc:
            types.append(token.lemma_)
            if token.lemma_ == "-PRON-":  # pronoun: mirror it in whichever direction matches
                if token.text.upper() in self.pronouns:
                    result.append(self.pronouns[token.text.upper()].lower())
                    continue
                if token.text.upper() in self.inv_pronouns:
                    result.append(self.inv_pronouns[token.text.upper()].lower())
                    continue
            # auxiliary verb immediately after a pronoun must agree with the mirrored pronoun
            if (token.lemma_ == "be" or token.dep_ == "aux") and types[-2] == "-PRON-":
                if token.text.upper() in self.auxiliaries:
                    result.append(self.auxiliaries[token.text.upper()].lower())
                    continue
                if token.text.upper() in self.inv_auxiliaries:
                    result.append(self.inv_auxiliaries[token.text.upper()].lower())
                    continue
            result.append(token.text)  # everything else passes through unchanged
        for i in range(len(result)):
            if result[i] == "i":  # re-capitalize the pronoun "I"
                result[i] = "I"
        result = ' '.join(result)
        result = result.replace(" '", "'")  # fix tokenizer splits like "I 'AM", "YOU 'LL"
        return result

    # Pronoun fixer to handle situations like YOU and YOURSELF
    def pronoun_fixer(self, subject):  # TODO: Extend the context of this function
        """Normalize reflexive 'yourself' forms to plain 'you' (case-preserving)."""
        if subject == "yourself":
            return "you"
        elif subject == "Yourself":
            return "You"
        elif subject == "YOURSELF":
            return "YOU"
        else:
            return subject

    def capitalizer(self, array):
        """Return each word in lowercase, Capitalized and UPPERCASE form."""
        result = []
        for word in array:
            result.append(word)
            result.append(word.capitalize())
            result.append(word.upper())
        return result
class JobDB:
    """Keeps a database of jobs, with a MD5 hash that encodes the function
    name, version, and all arguments to the function.

    All TinyDB access is serialized through a single threading lock, so
    instances may be shared between worker threads.
    """

    def __init__(self, path):
        # path: filename of the TinyDB JSON store backing this job database
        self.db = TinyDB(path)
        self.lock = Lock()  # guards every read/write of self.db

    def get_result_or_attach(self, key, prov, running):
        """Resolve a duplicate job identified by its provenance *prov*.

        Returns one of three tuples:
          ('retrieved', stored_key, result) — a finished result exists;
          ('attached', stored_key, None)    — the same job/workflow is still
                                              running, so *key* was attached;
          ('broken', None, None)            — a stale record was found and
                                              removed; caller should rerun.
        NOTE(review): assumes a record for *prov* exists (self.db.get would
        return None otherwise) — callers appear to check job_exists() first;
        confirm against call sites.
        """
        job = Query()
        with self.lock:
            rec = self.db.get(job.prov == prov)
            if 'result' in rec:
                return 'retrieved', rec['key'], rec['result']
            job_running = rec['key'] in running
            wf_running = rec['link'] in running.workflows
            if job_running or wf_running:
                self.db.update(attach_job(key), job.prov == prov)
                return 'attached', rec['key'], None
            print("WARNING: unfinished job in database. Removing it and "
                  " rerunning.", file=sys.stderr)
            # legacy TinyDB (<4) removal-by-element-id API
            self.db.remove(eids=[rec.eid])
            return 'broken', None, None

    def job_exists(self, prov):
        """Return True when a job with provenance *prov* is recorded."""
        job = Query()
        with self.lock:
            return self.db.contains(job.prov == prov)

    def store_result(self, key, result):
        """Store *result* for job *key*; return the list of attached job keys.

        Silently ignores unknown keys (returns None in that case).
        """
        job = Query()
        with self.lock:
            if not self.db.contains(job.key == key):
                return
        # NOTE(review): time stamping happens outside the lock above because
        # add_time_stamp re-acquires the same (non-reentrant) lock.
        self.add_time_stamp(key, 'done')
        with self.lock:
            self.db.update(
                {'result': result, 'link': None},
                job.key == key)
            rec = self.db.get(job.key == key)
            return rec['attached']

    def new_job(self, key, prov, job_msg):
        """Register a freshly scheduled job; returns (key, prov) unchanged."""
        with self.lock:
            self.db.insert({
                'key': key,
                'attached': [],  # keys of duplicate jobs waiting on this one
                'prov': prov,
                'link': None,  # owning workflow, set later via add_link
                'time': {'schedule': time_stamp()},
                'version': job_msg['data']['hints'].get('version'),
                'function': job_msg['data']['function'],
                'arguments': job_msg['data']['arguments']
            })
        return key, prov

    def add_link(self, key, ppn):
        """Associate job *key* with workflow *ppn*."""
        job = Query()
        with self.lock:
            self.db.update({'link': ppn}, job.key == key)

    def get_linked_jobs(self, ppn):
        """Return the keys of all jobs linked to workflow *ppn*."""
        job = Query()
        with self.lock:
            rec = self.db.search(job.link == ppn)
            return [r['key'] for r in rec]

    def add_time_stamp(self, key, name):
        """Record the current time under time[*name*] for job *key*."""
        def update(r):
            r['time'][name] = time_stamp()
        job = Query()
        with self.lock:
            self.db.update(
                update, job.key == key)
class Plugin(object):
    '''
    Noku Image Module

    Discord bot plugin that caches imgur subreddit galleries under named
    tags and serves random images from them. Per-channel access is gated
    through a TinyDB config file.

    Fixes applied in review:
      * ``if dbCache != 0`` — search() returns a list, so the comparison was
        always true; replaced with a truthiness check.
      * ``imgs`` cached results under ``args.lower()`` but read them back
        under ``args`` — a mixed-case query raised KeyError; now keyed
        consistently by the lowercased query.
      * ``addImage`` tested ``len(args.split(" ")) > 0`` which is always
        true; a missing subreddit argument crashed with IndexError. Now
        requires two fields before indexing.
    '''

    def __init__(self, pm):
        self.pm = pm  # plugin manager; owns the discord client
        self.modulename = 'gallery'
        self.configPath = 'pluginsconfig/data_config-{0}_a.json'.format(
            self.modulename)
        self.configDB = TinyDB(self.configPath)  # stores tag->subreddit links and channel permissions
        self.client = ImgurClient("43bdb8ab21d18b9",
                                  "fcba34a83a4650474ac57f6e3f8b0750dd26ecf5")
        self.imagecache = {}  # tag -> list of imgur gallery items
        self.schcache = {}  # lowercased search query -> imgur search results
        self.buildImagePre()  # warm the tag cache synchronously at startup

    @staticmethod
    def register_events():
        """Declare the chat commands this plugin responds to."""
        return [
            Events.Command(
                "gallery.build", Ranks.Default,
                "Builds the gallery cache, must be run after booting the bot."
            ),
            Events.Command(
                "gallery.add", Ranks.Default,
                "[tag] [subreddit] - scans the subreddit for images and posts a random image."
            ),
            Events.Command("gallery.delete", Ranks.Default,
                           "[tag] Deletes an image tag."),
            Events.Command("subimg", Ranks.Default,
                           "[tag] - Posts a random image from a subreddit."),
            Events.Command("imgs", Ranks.Default,
                           "[query] - Searches imgur for images."),
            Events.Command("gallery.allow", Ranks.Admin),
            Events.Command("gallery.block", Ranks.Admin)
        ]

    async def handle_command(self, message_object, command, args):
        """Dispatch a parsed chat command to the matching handler."""
        try:
            print("--{2}--\n[Noku-{4}] {0} command from {1} by {3}".format(
                command, message_object.channel.name,
                arrow.now().format('MM-DD HH:mm:ss'),
                message_object.author.name, self.modulename))
        except:
            print("[Noku]Cannot display data, probably emojis.")
        # only respond in allowed channels or in private messages
        if self.configDB.contains(Query().chanallow == message_object.channel.
                                  id) or message_object.channel.is_private:
            '''
            Add modules checks here
            '''
            if command == "gallery.build":
                await self.buildImage(message_object)
            if command == "gallery.add":
                await self.addImage(message_object, args[1])
            if command == "gallery.delete":
                await self.delImage(message_object, args[1])
            if command == "imgs":
                await self.imgs(message_object, args[1])
            if command == "subimg":
                if args[1] != "":
                    await self.showImage(message_object, args[1])
                else:
                    await self.showGallery(message_object)
            #Do not modify or add anything below it's for permissions
            if command == "{0}.allow".format(self.modulename):
                await self.allowChan(message_object)
            if command == "{0}.block".format(self.modulename):
                await self.blockChan(message_object)

    '''
    Add modules here
    '''

    async def imgs(self, message_object, args):
        """Search imgur for *args* and post one random result (cached)."""
        await self.pm.client.send_typing(message_object.channel)
        query = args.lower()  # cache key; must match both write and read below
        new = True
        while new:
            if query in self.schcache:
                if len(self.schcache[query]) > 0:
                    image = random.choice(self.schcache[query])
                    if "imgur.com/a/" in image.link:
                        # album link: pick one image from inside the album
                        image = random.choice(
                            self.client.get_album_images(image.id)).link
                    else:
                        image = image.link
                else:
                    image = ":information_source:`No results for your query!`"
                print("[Gallery]Sending: {0}".format(image))
                new = False
                await self.pm.client.send_message(message_object.channel,
                                                  image)
            else:
                # populate the cache, then loop once more to serve from it
                self.schcache[query] = self.client.gallery_search(args)

    async def buildImage(self, message_object=None):
        """Rebuild the tag image cache on demand (chat command)."""
        status = await self.pm.client.send_message(
            message_object.channel,
            ':information_source:`Building Cache for Noku-Image`'.format())
        await self.pm.client.send_typing(message_object.channel)
        dbCache = self.configDB.search(Query().tag != "")
        print("[Noku]Building cache")
        if dbCache:  # search() returns a list; skip when no tags configured
            for item in dbCache:
                print("[Noku]Retrieving Image Cache for {0}".format(
                    item['tag']))
                await self.addCache(item['tag'], item['link'])

    def buildImagePre(self, message_object=None):
        """Synchronous variant of buildImage, run once from __init__."""
        dbCache = self.configDB.search(Query().tag != "")
        print("[Noku]Building cache")
        if dbCache:  # search() returns a list; skip when no tags configured
            for item in dbCache:
                print("[Noku]Retrieving Image Cache for {0}".format(
                    item['tag']))
                self.imagecache[item['tag']] = self.client.subreddit_gallery(
                    item['link'])

    async def addCache(self, tag, link):
        """Fetch the subreddit gallery for *link* and cache it under *tag*."""
        self.imagecache[tag] = self.client.subreddit_gallery(link)

    async def addImage(self, message_object, args):
        """Register a new tag: ``[tag] [subreddit]`` (needs >= 10 images)."""
        status = await self.pm.client.send_message(
            message_object.channel,
            ':information_source:`Retreiving gallery info for Noku-Image`'.
            format())
        fields = args.split(" ")
        if len(fields) > 1:  # need both a tag and a subreddit
            tag = fields[0]
            subreddit = fields[1]
            await self.pm.client.edit_message(
                status,
                ':information_source:`Noku-Image: Building {0} cache.`'.format(
                    tag))
            await self.pm.client.send_typing(message_object.channel)
            self.imagecache[tag] = self.client.subreddit_gallery(subreddit)
            if len(self.imagecache[tag]) > 10:
                self.configDB.insert({'tag': tag, 'link': subreddit})
                await self.pm.client.send_message(
                    message_object.channel,
                    ':information_source:`Successfully generated and added tag!`'
                    .format())
            else:
                await self.pm.client.send_message(
                    message_object.channel,
                    ':exclamation:`Gallery provided is less than 10 images!`'.
                    format())
        else:
            await self.pm.client.send_message(
                message_object.channel,
                ':information_source: Usage:`~addgalery [tag] [subreddit]`'.
                format())

    async def delImage(self, message_object, args):
        """Delete the tag named *args* from the config database."""
        self.configDB.remove(Query().tag == args)
        await self.pm.client.send_message(
            message_object.channel,
            ":information_source:`{0} has been deleted! Probably..`".format(
                args))

    async def showImage(self, message_object, args):
        """Post a random cached image for tag *args*."""
        try:
            image = random.choice(self.imagecache[args])
            if "imgur.com/a/" in image.link:
                image = random.choice(self.client.get_album_images(
                    image.id)).link
            else:
                image = image.link
            await self.pm.client.send_message(message_object.channel,
                                              "{0}".format(image))
        except:
            # unknown tag or imgur failure; keep the bot alive
            await self.pm.client.send_message(
                message_object.channel,
                ":exclamation:`Welp, that\'s not a valid tag!`")

    async def showGallery(self, message_object):
        """List every configured tag in a code block."""
        macros = self.configDB.search(Query().tag != "")
        x = "```"
        for m in macros:
            x = x + m['tag'] + " "
        x = x + "```"
        await self.pm.client.send_message(message_object.channel, x)

    #Do not modify or add anything below it's for permissions
    async def allowChan(self, message_object):
        """Whitelist the current channel for this module."""
        self.configDB.insert({'chanallow': message_object.channel.id})
        await self.pm.client.send_message(
            message_object.channel,
            ':information_source:`Noku Bot-{1} has been allowed access to {0}`'
            .format(message_object.channel.name, self.modulename))

    async def blockChan(self, message_object):
        """Remove the current channel from the module whitelist."""
        self.configDB.remove(Query().chanallow == message_object.channel.id)
        await self.pm.client.send_message(
            message_object.channel,
            ':information_source:`Noku Bot-{1} has been blocked access to {0}`'
            .format(message_object.channel.name, self.modulename))
class Learn():
    """Learns facts from user speech and answers questions about them.

    Legacy Python 2 variant (uses dict.iteritems and str.decode): matches
    FORGET/DEFINE commands with regexes, otherwise extracts a
    subject / verbtense / clause triple via spaCy and stores it in a
    per-user TinyDB file (~/.dragonfire_db.json). All replies are uppercase.
    """

    def __init__(self):
        self.replacements = collections.OrderedDict(
        )  # first-person -> second-person word map, insertion-ordered
        self.replacements["I'M"] = "YOU-ARE"
        self.replacements["I-WAS"] = "YOU-WERE"
        self.replacements["I"] = "YOU"
        self.replacements["MY"] = "YOUR"
        self.replacements["MINE"] = "YOURS"
        self.replacements["MYSELF"] = "YOURSELF"
        self.replacements["OURSELVES"] = "YOURSELVES"
        home = expanduser("~")  # home directory of the current user
        self.db = TinyDB(
            home + '/.dragonfire_db.json'
        )  # fact store; /home/USERNAME/.dragonfire_db.json
        self.nlp = spacy.load(
            'en')  # load en_core_web_sm, English, 50 MB, default model

    # Entry function for this class. Dragonfire calls only this function. It does not handle TTS.
    def respond(self, com):
        """Parse command *com* and either answer, store, or forget a fact."""
        # explicit FORGET/UPDATE command handled by regex before any parsing
        forget = "^(?:FORGET|UPDATE) (?:EVERYTHING YOU KNOW ABOUT |ABOUT )?(?P<subject>.*)"
        capture = re.search(forget, com)
        if capture:
            if self.db.remove(
                    Query().subject == self.pronoun_fixer(
                        capture.group('subject'))
            ):  # truthy when a record about the subject existed and was removed
                return "OK, I FORGOT EVERYTHING I KNOW ABOUT " + self.mirror(
                    capture.group('subject'))
            else:
                return "I DON'T EVEN KNOW ANYTHING ABOUT " + self.mirror(
                    capture.group('subject'))
        # explicit DEFINE/EXPLAIN command handled by regex as well
        define = "(?:PLEASE |COULD YOU )?(?:DEFINE|EXPLAIN|TELL ME ABOUT|DESCRIBE) (?P<subject>.*)"
        capture = re.search(define, com)
        if capture:
            return self.db_getter(capture.group('subject'))
        doc = self.nlp(
            com.decode('utf-8')
        )  # spaCy only supports unicode strings (Python 2), so decode first
        subject = [
        ]  # subject tokens (usually things like I'M, YOU, HE, SHE, IT)
        prev_type = None  # dependency type of the previous noun phrase
        for np in doc.noun_chunks:  # iterate over the noun phrases (chunks)
            # 'pobj' directly after 'nsubj' completes possessive subjects like "MY PLACE OF BIRTH"
            if np.root.dep_ == 'pobj':  # object of a preposition
                if prev_type == 'nsubj':
                    subject.append(
                        np.root.head.text.encode('utf-8')
                    )  # the preposition itself (e.g. 'OF')
                    subject.append(
                        np.text.encode('utf-8')
                    )  # the object of the preposition (e.g. 'BIRTH')
                prev_type = 'pobj'
            if np.root.dep_ == 'nsubj':  # nominal subject
                # "wh-" words also parse as nsubj but mark questions, so exclude them
                if np.root.tag_ != 'WP' and prev_type not in [
                        'pobj', 'nsubj'
                ]:
                    subject.append(np.text.encode('utf-8'))
                    prev_type = 'nsubj'
                if np.root.tag_ == 'WP':
                    prev_type = 'WP'
            if np.root.dep_ == 'attr':  # attribute
                if prev_type not in [
                        'pobj', 'nsubj'
                ]:
                    subject.append(np.text.encode('utf-8'))
                prev_type = 'attr'
        subject = ' '.join(
            subject).strip()  # concatenate all noun phrases found
        if subject:  # proceed only when a subject was extracted
            wh_found = False
            for word in doc:  # a "wh-" tag anywhere marks the command as a question
                if word.tag_ in [
                        'WDT', 'WP', 'WP$', 'WRB'
                ]:
                    wh_found = True
            if wh_found:  # question: look the subject up
                straight = self.db_getter(
                    subject)  # direct lookup by subject
                if straight is None:
                    return self.db_getter(subject, True)  # nothing found: try inverted lookup
                return straight
            else:  # statement: split it into verbtense + clause
                verb_found = False
                verbtense = None  # the am/is/are of the main sentence
                clause = []  # the information to be acknowledged
                for word in doc:
                    if verb_found:  # everything after the first verb is the clause
                        if word.pos_ != 'PUNCT':  # exclude punctuation
                            clause.append(word.text.encode('utf-8'))
                    if word.pos_ == 'VERB' and not verb_found:  # first verb becomes the verbtense
                        verb_found = True
                        verbtense = word.text.encode(
                            'utf-8')
                clause = ' '.join(clause).strip()  # concatenate the clause
                return self.db_setter(subject, verbtense, clause,
                                      com)  # store the new fact

    # Function to get a record from the database
    def db_getter(self, subject, invert=False):
        """Return a mirrored sentence describing *subject*, or None.

        With invert=True the search key is matched against the stored
        clause instead (used for WHO-style questions).
        """
        if invert:
            result = self.db.search(
                Query().clause == subject
            )  # match against stored clauses
        else:
            result = self.db.search(
                Query().subject == subject)  # match against stored subjects
        if result:
            # group clauses by verbtense: {'IS': [...], 'WAS': [...]}
            dictionary = {}
            for row in result:
                if row['verbtense'] not in dictionary:
                    dictionary[row['verbtense']] = []
                if row['clause'] not in dictionary[row[
                        'verbtense']]:
                    dictionary[row['verbtense']].append(
                        row['clause'])
            if invert:
                answer = row[
                    'subject']  # in WHO questions the stored subject is the answer
            else:
                answer = subject  # otherwise echo the queried subject
            first_verbtense = False
            for key, value in dictionary.iteritems(
            ):  # Python 2 dict iteration
                if not first_verbtense:  # first verbtense joined with a space
                    answer += ' ' + str(key)
                    first_verbtense = True
                else:  # later verbtenses joined with ", "
                    answer += ', ' + str(
                        key)
                first_clause = False
                for clause in value:
                    if not first_clause:  # first clause joined with a space
                        answer += ' ' + clause
                        first_clause = True
                    else:  # later clauses joined with ' AND '
                        answer += ' AND ' + clause
            return self.mirror(
                answer)  # mirror the answer (for example: I'M to YOU ARE)
        else:
            return None  # no result
        return None

    # Function to set a record to the database
    def db_setter(self, subject, verbtense, clause, com):
        """Insert the (subject, verbtense, clause) triple unless it already exists."""
        if not self.db.search(
                (Query().subject == subject) &
                (Query().verbtense == verbtense) &
                (Query().clause == clause)
        ):  # only insert when no exact record exists
            self.db.insert({
                'subject': subject,
                'verbtense': verbtense,
                'clause': clause
            })
        return "OK, I GET IT. " + self.mirror(
            com)  # echo the command mirrored back to the user

    # Function to mirror the answer (for example: I'M to YOU ARE)
    def mirror(self, answer):
        """Swap first/second-person words in *answer* (uppercase in/out)."""
        answer = answer.upper()  # normalization: the tables are all uppercase
        answer = self.forward_replace(answer)  # join multi-word forms into single tokens
        result = []
        for word in answer.split():
            replaced = False
            for key, value in self.replacements.iteritems(
            ):  # forward direction: first-person -> second-person
                if word == key:
                    result.append(value)
                    replaced = True
            if word in ["WE", "US"]:
                result.append("YOU")
                replaced = True
            elif word == "OUR":
                result.append("YOUR")
                replaced = True
            elif word == "OURS":
                result.append("YOURS")
                replaced = True
            if not replaced:
                result.append(word)  # unchanged words pass through
        if ' '.join(result) != answer:  # something was replaced going forward
            return self.backward_replace(' '.join(result))
        # nothing matched forward: try the inverse mapping instead
        result = []
        for word in answer.split():
            replaced = False
            for value, key in self.replacements.iteritems(
            ):  # inverted direction: second-person -> first-person
                if word == key:
                    result.append(value)
                    replaced = True
            if not replaced:
                result.append(word)
        return self.backward_replace(
            ' '.join(result))  # re-split the joined multi-word forms

    def forward_replace(self, string):
        """Fuse multi-word phrases into single tokens so word-wise mapping works."""
        string = string.replace("I WAS", "I-WAS")
        string = string.replace("YOU ARE", "YOU-ARE")
        string = string.replace("YOU WERE", "YOU-WERE")
        return string

    def backward_replace(self, string):
        """Undo forward_replace: split fused tokens back into phrases."""
        string = string.replace("I-WAS", "I WAS")
        string = string.replace("YOU-ARE", "YOU ARE")
        string = string.replace("YOU-WERE", "YOU WERE")
        return string

    # Pronoun fixer to handle situations like YOU and YOURSELF
    def pronoun_fixer(self, subject):  # TODO: Extend the context of this function
        """Normalize reflexive 'YOURSELF' to 'YOU' (uppercases the input)."""
        subject = subject.upper()
        if 'YOURSELF' in subject:
            subject = subject.replace('YOURSELF', 'YOU')
        return subject
class AttestationEndpoint(resource.Resource):
    """Twisted Web resource managing stored identity attestations.

    Backed by the 'attestation' table of a TinyDB file (openwallet.json).
    POST stores a completed attestation, PUT requests a (mocked) new one,
    GET lists all stored attestations.

    Fix applied in review: the de-duplication query in render_POST used
    Python's ``and`` between two TinyDB conditions — ``a and b`` evaluates
    to ``b`` alone, so the provider condition was silently dropped — and it
    matched on a field named 'option' that stored records never contain
    (they store 'attribute_name'). Old attestations were therefore never
    replaced. The query now combines both conditions with ``&`` and matches
    the field that is actually stored.
    """

    def __init__(self, config):
        resource.Resource.__init__(self)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.config = config
        self.db = TinyDB('openwallet.json').table('attestation')  # persistent attestation store

    # FIXME Hacky because developing on different domains
    def render_OPTIONS(self, request):
        """CORS preflight response allowing all methods used by the UI."""
        request.setHeader('Access-Control-Allow-Methods',
                          'POST, GET, OPTIONS, DELETE, PUT')
        request.setHeader('Access-Control-Allow-Headers', 'content-type')
        return json.dumps({"fine": "fine"})

    def render_POST(self, request):
        """Store a completed attestation, replacing any previous one for the
        same (provider, attribute_name) pair.

        Expects a JSON body with provider, attribute_name, attribute_value,
        attest_sig_b64 and server_addr; responds 400 on any missing field.
        """
        parameters = json.loads(request.content.read())
        required_fields = ['provider', 'attribute_name', 'attribute_value',
                           'attest_sig_b64', 'server_addr']
        for field in required_fields:
            if field not in parameters:
                request.setResponseCode(http.BAD_REQUEST)
                return json.dumps({"error": "missing %s parameter" % field})
        provider_name = parameters['provider']
        attribute_name = parameters['attribute_name']
        attribute_value = parameters['attribute_value']
        attest_sig_b64 = parameters['attest_sig_b64']
        server_addr = parameters['server_addr']
        # if provider_name not in PROVIDERS:
        #     request.setResponseCode(http.BAD_REQUEST)
        #     return json.dumps({"error": "unknown provider " + provider_name})
        # Drop any stale attestation for the same provider/attribute pair.
        # TinyDB conditions must be combined with '&', not Python 'and'.
        self.db.remove((where('provider') == provider_name) &
                       (where('attribute_name') == attribute_name))
        attest_dict = {}
        attest_dict['sig'] = attest_sig_b64
        attest_dict['provider'] = provider_name
        attest_dict['server'] = server_addr
        attest_dict['attribute_name'] = attribute_name
        attest_dict['attribute_value'] = attribute_value
        self.db.insert(attest_dict)
        return json.dumps({"success": True, "attestation": attest_dict})

    def render_PUT(self, request):
        """Request an attestation. TODO Rename route

        Validates the provider/option pair against PROVIDERS and returns a
        mocked attestation; responds 400 on missing or unknown values.
        """
        parameters = json.loads(request.content.read())
        required_fields = ['provider', 'option']
        for field in required_fields:
            if field not in parameters:
                request.setResponseCode(http.BAD_REQUEST)
                return json.dumps({"error": "missing %s parameter" % field})
        provider_name = parameters['provider']
        option_name = parameters['option']
        if provider_name not in PROVIDERS:
            request.setResponseCode(http.BAD_REQUEST)
            return json.dumps({"error": "unknown provider " + provider_name})
        option = next(
            (o for o in PROVIDERS[provider_name]['options']
             if o['name'] == option_name), None)
        if not option:
            request.setResponseCode(http.BAD_REQUEST)
            return json.dumps({"error": "unknown option " + option_name})
        # FIXME Mocked attestation
        attestation = {
            "boo": "hoo",
            "sig": "sig?",
            "attributes": [{"name": option_name, "value": "123kvknr"}],
            "provider": provider_name,
            "reason": "You asked"
        }
        return json.dumps({"success": True, "attestation": attestation})

    def render_GET(self, request):
        """Return every stored attestation."""
        return json.dumps({"attestations": self.db.all()})

    def getChild(self, path, request):
        """Route /<path> to the per-attestation endpoint sharing our table."""
        return SpecificAttestationEndpoint(path, self.db)
m1 = mapping m2 = sess[mapping] if (mapping.startswith("#") or mapping.startswith(".")): m1 = mapping[1:] m2 = sess[mapping][1:] body = body.replace("\"" + m1 + "\"", "\"" + m2 + "\"") for form in sess["_formData"]: body += ";window.addEventListener('load', () => {document.getElementById('" + form[ "_id"] + "').addEventListener('submit', e => {" #body += "e.preventDefault();\n" for i in form.keys(): if (not i.startswith("_")): body += "Array.prototype.slice.call(document.getElementById('" + form[ "_id"] + "').getElementsByTagName('input')).forEach(i => {if (i.name === '" + form[ i] + "') {i.name = '" + i + "'}});" body += "})});" print(base64.b64encode(body.encode("utf-8")).decode("utf-8")) else: print(encoded_body) else: print(encoded_body) # remove expired session ids after 30 seconds or when all reqs have been made q = Query() db.remove((q._css_remaining == 0 and q._js_remaining == 0) or time.time() - q._creation_time > 30)
def test_remove(db: TinyDB):
    """Removing by query deletes only the matching document."""
    target = where('char') == 'b'
    db.remove(target)

    remaining = len(db)
    assert remaining == 2
    assert db.count(where('int') == 1) == 2
class KpiStats(object):
    """**Gathers repo statistics from a list of github urls into a TinyDB**

    The class uses github3.py to create an authenticated Github session.
    Session is authenticated either by:

    #. Setting up a `github-token
       <https://help.github.com/articles/creating-an-access-token-for-command-line-use/>`_
       and copying the key into a local file in the cwd called 'secret_key'.
    #. A github-token set as an environment variable 'GHUB_API_TOKEN'
       (used this on Travis).
    #. If neither of the above is found, the user is prompted for a
       username and password.

    The class should be instantiated with a list of github repo url strings.

    :param urls: list of url strings ['https://github.com/<user>/<repo>',]
    :returns: KpiStats() object

    :Example:

    The following use case shows how to feed a list of urls to KpiStats and
    then read the TinyDB object produced (by default) into pandas.

    >>> from DashPykpi.kpistats import KpiStats, GitURLs, GraphKPIs
    >>> url_fetch = GitURLs()
    >>> urls = url_fetch.urls
    >>> test = KpiStats(urls=urls)
    >>> test.work(status=True)
    >>> db = TinyDB('tinydb_for_KPI.json')
    >>> df = pd.DataFrame(db.all())
    """

    def __init__(self, urls):
        # Authentication preference order: local token file, environment
        # variable, then interactive username/password prompt.
        if os.path.isfile('secret_key'):
            # Locally, with a secret_key file.
            # BUGFIX: use a context manager so the file handle is closed.
            with open("secret_key") as fn:
                self.gh = login(token=fn.read().split()[0])
        elif os.environ.get('GHUB_API_TOKEN'):
            # On Travis? (GHUB_API_TOKEN could be set...)
            self.gh = login(token=os.environ['GHUB_API_TOKEN'])
        else:  # Or just use username/password method
            self.gh_name = input("Username to access github with:")
            pss = getpass.getpass(prompt='Ghub pswd {0}:'.format(self.gh_name))
            self.gh = login(self.gh_name, pss)
        self.urls = urls  # A list of URL strings
        self.repo = None  # Current github3 repository object (set per-url)
        self.stats = None  # Stats dict for the current repo (set per-url)
        self.db = TinyDB('tinydb_for_KPI.json')  # create new or open existing

    def __str__(self):
        # BUGFIX: __str__ must *return* a string; the original printed it and
        # implicitly returned None, so str(obj) raised TypeError.
        return "A KPI back-end to extract data from Github."

    def get_repo_object_from_url(self, url):
        """Get a repository object for a given github url

        Retrieves a github3.py.Repository() object from a url string under an
        authenticated session.

        :param url: a string of format 'https://github.com/<user>/<repo>'
        :returns: github3.py.Repository() object as self.repo()
        """
        demo = 'https://github.com/<user>/<repo>'
        er1 = "Error: url should be a string in format of "
        er2 = "Error: {0} isn't valid ".format(url)
        assert isinstance(url, str), er1 + demo
        assert url.split('/')[-3] == 'github.com', er2
        user_str, repo_str = url.split('/')[-2:]
        self.repo = self.gh.repository(user_str, repo_str)
        return

    def get_repo_stats(self, debug=False):
        """Identify the statistics of an individual repo

        Examines self.repo() to identify key statistics from a repository.

        :param debug: print the repo being examined
        :rtype: A dictionary object as self.stats

        :Example:

        >>> from DashPykpi.kpistats import KpiStats
        >>> test = KpiStats(urls=["https://github.com/UCL-RITS/RSD-Dashboard"])
        >>> test.get_repo_object_from_url(url=test.urls[0])
        >>> test.get_repo_stats()
        >>> test.stats # print a dictionary of retrieved stats
        """
        if debug:
            print('\nExamining repo {0}'.format(self.repo))
        # (author, commit-count) pairs for every contributor
        contribs = [(str(contrib.author), contrib.total)
                    for contrib in self.repo.iter_contributor_statistics()]
        total = sum(user_num[1] for user_num in contribs)
        branch_count = len(list(self.repo.iter_branches()))
        commits_over_time = [commit for commit
                             in self.repo.iter_commit_activity()]
        weekly_commits = [week['total'] for week in commits_over_time]
        self.stats = {
            'stargazers': self.repo.stargazers,
            'fork_count': self.repo.fork_count,
            'commits_by_author': contribs,
            'num_contributors': len(contribs),
            'total_commits': total,
            'repo_owner': self.repo.owner.login,
            'repo_name': self.repo.name,
            'branches': branch_count,
            'language': self.repo.language,
            "weekly_commits": weekly_commits,
        }
        return

    def add_db_row(self):
        """KpiStats.add_db_row(self)

        Checks if there is a database and entry already present, if there
        isn't it adds a row to a database. If there is one already, it checks
        to see if the newly retrieved dictionary has updated info. If so, it
        removes the old row, and adds in the new one. If there is an error,
        and there is more than one row per repo it throws an assert error.

        :param: self
        :rtype: updates database connected to self.db
        """
        DBfield = Query()
        results = self.db.search(DBfield.repo_name == self.repo.name)
        assert len(results) < 2, "Error, repeat entries in DB for same repo."
        if len(results) == 0:  # if no record then add the results
            self.db.insert(self.stats)
        if len(results) == 1:  # if record exists, but the user has rerun code
            # NOTE(review): .eid / remove(eids=...) is the legacy TinyDB <3 API;
            # newer TinyDB uses .doc_id / remove(doc_ids=...).
            eid = results[0].eid
            if results[0]['total_commits'] < self.stats['total_commits']:
                self.db.remove(eids=[eid])  # remove the old entry
                self.db.insert(self.stats)  # add the new entry
            else:
                # entry exists and new stats are no different (no repo changes)
                pass
        return

    def clean_state(self):
        """Cleans the stats and repo objects from the class between updates

        Clean temporary data in the class before attempting to get a new repo
        object, specifically setting self.repo and self.stats to None.

        :param: self
        :rtype self.repo: None
        :rtype self.stats: None
        """
        self.repo = None
        self.stats = None

    def work(self, status=False, debug=False, verbose=False, add_to_db=True):
        """Main routine: fetch stats for every url and store them.

        Passes single url strings to self.get_repo_object_from_url() to
        populate self.repo, calls self.get_repo_stats() to build self.stats,
        writes the row via self.add_db_row(), then clears per-repo state.

        :param status: print a progress percentage
        :param debug: forwarded to get_repo_stats()
        :param verbose: print every retrieved stat
        :param add_to_db: write the stats to the TinyDB file

        :Example:

        See DashPykpi.kpistats.KpiStats()
        """
        for i, url in enumerate(self.urls):
            if status:
                print("\rComplete...{0:2.0f}%".format(((i+1)/len(self.urls)
                                                       )*100.,), end="")
            self.get_repo_object_from_url(url=url)
            self.get_repo_stats(debug=debug)
            # Deal with html get timeout bug here -> retry if no commits found
            timeout_bug = self.stats['total_commits'] < 1
            if timeout_bug:
                self.get_repo_stats()
            if add_to_db:
                self.add_db_row()
            if verbose:
                for k in sorted(self.stats):
                    print(k, '-->', self.stats[k])
            self.clean_state()
def test_remove_multiple(db: TinyDB):
    """A condition matching every document empties the database."""
    matches_all = where('int') == 1
    db.remove(matches_all)

    assert len(db) == 0
class Learner():
    """Class to provide the learning ability.
    """

    def __init__(self, nlp):
        """Initialization method of :class:`dragonfire.learn.Learner` class.

        Args:
            nlp:  :mod:`spacy` model instance.
        """
        # Pronoun/auxiliary mirror tables (user perspective -> Dragonfire
        # perspective and back). Ordered dicts preserve insertion order.
        self.pronouns = collections.OrderedDict()
        self.pronouns["I"] = "YOU"
        self.pronouns["ME"] = "YOU"
        self.pronouns["MY"] = "YOUR"
        self.pronouns["MINE"] = "YOURS"
        self.pronouns["MYSELF"] = "YOURSELF"
        self.pronouns["OUR"] = "YOUR"
        self.pronouns["OURS"] = "YOURS"
        self.pronouns["OURSELVES"] = "YOURSELVES"
        self.pronouns["WE"] = "YOU"
        self.pronouns["US"] = "YOU"

        self.inv_pronouns = collections.OrderedDict()
        self.inv_pronouns["YOU"] = "I"
        self.inv_pronouns["YOUR"] = "MY"
        self.inv_pronouns["YOURS"] = "MINE"
        self.inv_pronouns["YOURSELF"] = "MYSELF"
        self.inv_pronouns["YOURSELVES"] = "OURSELVES"

        self.auxiliaries = collections.OrderedDict()
        self.auxiliaries["AM"] = "ARE"
        self.auxiliaries["'M"] = " ARE"
        self.auxiliaries["WAS"] = "WERE"

        self.inv_auxiliaries = collections.OrderedDict()
        self.inv_auxiliaries["ARE"] = "AM"
        self.inv_auxiliaries["WERE"] = "WAS"

        home = expanduser("~")  # Get the home directory of the user
        # Fact store: /home/USERNAME/.dragonfire_db.json
        self.db = TinyDB(home + '/.dragonfire_db.json')
        self.nlp = nlp  # Load en_core_web_sm, English, 50 MB, default model
        self.is_server = False

    def respond(self, com, is_server=False, user_id=None):
        """Method to respond the user's input/command using learning ability.

        Args:
            com (str):  User's command.

        Keyword Args:
            is_server (bool):   Is Dragonfire running as an API server?
            user_id (int):      User's ID.

        Returns:
            str:  Response.

        .. note::

            Entry function for :class:`Learner` class. Dragonfire calls only
            this function. It does not handle TTS.
        """
        self.is_server = is_server
        is_public = True
        com = self.clean(com)
        # spaCy needs unicode; self.nlp() handles all parsing
        doc = self.nlp(com)
        subject = []  # subject list (usually; I'M, YOU, HE, SHE, IT, etc.)
        types = []  # types of the previous noun phrases
        types.append("")
        # TODO: Cover 'dobj' also; doc = nlp(u'DESCRIBE THE SUN')
        # >>> (u'THE SUN', u'SUN', u'dobj', u'DESCRIBE')
        for np in doc.noun_chunks:  # iterate over the noun phrases(chunks)
            types.append(np.root.dep_)
            np_text, is_public = self.detect_pronoun(np.text)
            # Complete possessive form of nouns: object of a preposition right
            # after a nominal subject (captures e.g. "MY PLACE OF BIRTH").
            if np.root.dep_ == 'pobj' and types[-2] == 'nsubj':
                # parent text from the syntactic tree (e.g. 'OF')
                subject.append(np.root.head.text)
                subject.append(np_text)
            # "wh-" words could be tagged nsubj but are out of scope here.
            if np.root.dep_ == 'nsubj' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np_text)
            if np.root.dep_ == 'attr' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np_text)
            if np.root.dep_ == 'dobj' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np_text)
        subject = [x.strip() for x in subject]
        subject = ' '.join(subject)  # concatenate all noun phrases found
        if subject:
            # Skip learning if the user is talking about Dragonfire itself.
            if subject.upper() in self.inv_pronouns:
                return ""
            # Only "wh-" form questions are recognized as questions.
            wh_found = False
            for word in doc:
                if word.tag_ in ['WDT', 'WP', 'WP$', 'WRB']:
                    wh_found = True
            if wh_found:  # if that's a question
                straight = self.db_get(
                    subject, is_public=is_public, user_id=user_id)
                if straight is None:
                    # nothing found: try the inverted lookup
                    return self.db_get(
                        subject, is_public=is_public, user_id=user_id,
                        invert=True)
                return straight
            else:
                verb_found = False
                verbtense = None  # the am/is/are of the main sentence
                clause = []  # the information that we need to acknowledge
                verbs = []
                for word in doc:
                    if verb_found:
                        # everything after the first verb, minus punctuation
                        if word.pos_ != 'PUNCT':
                            clause.append(word.text)
                    if word.pos_ == 'VERB' and word.is_stop and not verb_found:
                        verb_found = True
                        verbtense = word.text
                    if word.pos_ == 'VERB':
                        verbs.append(word.text)
                clause = [x for x in clause]
                clause = ' '.join(clause).strip()
                # keywords that order get/remove operations on the database
                if any(verb in verbs for verb in self.upper_capitalize(
                        ["forget", "remove", "delete", "update"])):
                    if self.is_server and is_public:
                        return "I cannot forget a general fact."
                    return self.db_delete(
                        subject, is_public=is_public, user_id=user_id)
                if any(verb in verbs for verb in self.upper_capitalize(
                        ["define", "explain", "tell", "describe"])):
                    return self.db_get(
                        subject, is_public=is_public, user_id=user_id)
                if verbtense:
                    return self.db_upsert(
                        subject, verbtense, clause, com,
                        is_public=is_public, user_id=user_id)

    def db_get(self, subject, invert=False, is_public=True, user_id=None):
        """Function to get a record from the database.

        Args:
            subject (str):  Subject that extracted from the user's
                            input/command.

        Keyword Args:
            invert (bool):      Is it invert mode? (swap subject and clause)
            is_public (int):    Is it a public record? (non-user specific)
            user_id (int):      User's ID.

        Returns:
            str:  Response.
        """
        if self.is_server:
            u_id = 0
            if not is_public and user_id:
                u_id = user_id
            db = pymysql.connect(Config.MYSQL_HOST, Config.MYSQL_USER,
                                 Config.MYSQL_PASS, Config.MYSQL_DB)
            cursor = db.cursor(pymysql.cursors.DictCursor)
            # SECURITY FIX: parameterized queries instead of str.format —
            # `subject` comes straight from user speech (SQL injection risk).
            if invert:
                sql = ("SELECT * FROM facts WHERE clause = %s "
                       "AND user_id = %s ORDER BY counter DESC")
            else:
                sql = ("SELECT * FROM facts WHERE subject = %s "
                       "AND user_id = %s ORDER BY counter DESC")
            try:
                cursor.execute(sql, (subject, u_id))
                results = cursor.fetchall()
                if not results:
                    return None
                row = results[0]
                answer = (row['subject'] + ' ' + row['verbtense'] + ' '
                          + row['clause'])
                return self.mirror(answer)
            except pymysql.InternalError as error:
                code, message = error.args
                print(">>>>>>>>>>>>>", code, message)
                return "Sorry, I encountered with a database problem."
            finally:
                # BUGFIX: db.close() was unreachable (placed after the
                # returns), leaking the connection on every call.
                db.close()
        else:
            if invert:
                # search by clause string (inverted lookup)
                result = self.db.search(Query().clause == subject)
            else:
                # search by subject string
                result = self.db.search(Query().subject == subject)
            if result:
                # verbtense -> list of unique clauses
                dictionary = {}
                for row in result:
                    if row['verbtense'] not in dictionary:
                        dictionary[row['verbtense']] = []
                    if row['clause'] not in dictionary[row['verbtense']]:
                        dictionary[row['verbtense']].append(row['clause'])
                    if invert:
                        # in WHO questions the subject is actually the clause,
                        # so learn the subject from the database row
                        answer = row['subject']
                    else:
                        answer = subject  # the answer we will return
                first_verbtense = False
                for key, value in dictionary.items():
                    if not first_verbtense:
                        answer += ' ' + str(key)
                        first_verbtense = True
                    else:
                        answer += ', ' + str(key)
                    first_clause = False
                    for clause in value:
                        if not first_clause:
                            answer += ' ' + clause
                            first_clause = True
                        else:
                            answer += ' and ' + clause
                # mirror the answer (for example: I'M to YOU ARE)
                return self.mirror(answer)
            else:
                return None

    def db_upsert(self, subject, verbtense, clause, com,
                  is_public=True, user_id=None):
        """Function to insert(or update) a record to the database.

        Args:
            subject (str):      Subject that extracted from the user's
                                input/command.
            verbtense (str):    The am/is/are in the user's input/command.
            clause (str):       Clause that contains the fact.
            com (str):          User's command.

        Keyword Args:
            is_public (int):    Is it a public record? (non-user specific)
            user_id (int):      User's ID.

        Returns:
            str:  Response.
        """
        if self.is_server:
            u_id = 0
            if not is_public and user_id:
                u_id = user_id
            db = pymysql.connect(Config.MYSQL_HOST, Config.MYSQL_USER,
                                 Config.MYSQL_PASS, Config.MYSQL_DB)
            cursor = db.cursor(pymysql.cursors.DictCursor)
            # SECURITY FIX: parameterized queries instead of str.format.
            sql1 = ("SELECT * FROM facts WHERE subject = %s "
                    "AND verbtense = %s AND clause = %s AND user_id = %s")
            sql2 = ("INSERT INTO facts (subject, verbtense, clause, user_id) "
                    "VALUES(%s, %s, %s, %s)")
            sql3 = ("UPDATE facts SET counter = counter + 1 "
                    "WHERE subject = %s AND verbtense = %s "
                    "AND clause = %s AND user_id = %s")
            params = (subject, verbtense, clause, u_id)
            try:
                cursor.execute(sql1, params)
                results = cursor.fetchall()
                if not results:
                    cursor.execute(sql2, params)
                    db.commit()
                else:
                    # exact fact already known: bump its counter
                    cursor.execute(sql3, params)
                    db.commit()
            except pymysql.InternalError as error:
                code, message = error.args
                print(">>>>>>>>>>>>>", code, message)
                return "Sorry, I encountered with a database problem."
            finally:
                # BUGFIX: the error path previously returned without closing.
                db.close()
        else:
            if not self.db.search(
                    (Query().subject == subject) &
                    (Query().verbtense == verbtense) &
                    (Query().clause == clause)):
                # no exact record in the database yet: insert the given data
                self.db.insert({
                    'subject': subject,
                    'verbtense': verbtense,
                    'clause': clause
                })
        # mirror the command (user's speech) and return it to say
        return "OK, I get it. " + self.mirror(com)

    def db_delete(self, subject, is_public=True, user_id=None):
        """Function to delete a record from the database.

        Args:
            subject (str):  Subject that extracted from the user's
                            input/command.

        Keyword Args:
            is_public (int):    Is it a public record? (non-user specific)
            user_id (int):      User's ID.

        Returns:
            str:  Response.
        """
        if self.is_server:
            if not is_public and user_id:
                db = pymysql.connect(Config.MYSQL_HOST, Config.MYSQL_USER,
                                     Config.MYSQL_PASS, Config.MYSQL_DB)
                cursor = db.cursor(pymysql.cursors.DictCursor)
                # SECURITY FIX: parameterized queries instead of str.format.
                sql1 = "SELECT * FROM facts WHERE subject = %s AND user_id = %s"
                sql2 = "DELETE FROM facts WHERE subject = %s AND user_id = %s"
                try:
                    cursor.execute(sql1, (subject, user_id))
                    results = cursor.fetchall()
                    if not results:
                        return ("I don't even know anything about "
                                + self.mirror(subject))
                    else:
                        cursor.execute(sql2, (subject, user_id))
                        db.commit()
                        return ("OK, I forgot everything I know about "
                                + self.mirror(subject))
                except pymysql.InternalError as error:
                    code, message = error.args
                    print(">>>>>>>>>>>>>", code, message)
                    return "Sorry, I encountered with a database problem."
                finally:
                    # BUGFIX: the error path previously returned without
                    # closing the connection.
                    db.close()
            else:
                return ("I don't even know anything about "
                        + self.mirror(subject))
        else:
            if self.db.remove(Query().subject == self.fix_pronoun(subject)):
                return ("OK, I forgot everything I know about "
                        + self.mirror(subject))
            else:
                return ("I don't even know anything about "
                        + self.mirror(subject))

    def mirror(self, answer):
        """Function to mirror the answer (for example: I'M to YOU ARE).

        Args:
            answer (str):  Prepared answer that just before the actual return
                           of :func:`respond` method.

        Returns:
            str:  Response.
        """
        result = []
        types = []
        types.append("")
        doc = self.nlp(answer)
        for token in doc:
            types.append(token.lemma_)
            if token.lemma_ == "-PRON-":  # if it's a pronoun, mirror it
                if token.text.upper() in self.pronouns:
                    result.append(
                        self.pronouns[token.text.upper()].lower().strip())
                    continue
                if token.text.upper() in self.inv_pronouns:
                    result.append(
                        self.inv_pronouns[token.text.upper()].lower().strip())
                    continue
            # auxiliary right after a pronoun: mirror it too
            if (token.lemma_ == "be" or token.dep_ == "aux") and \
                    types[-2] == "-PRON-":
                if token.text.upper() in self.auxiliaries:
                    result.append(
                        self.auxiliaries[token.text.upper()].lower().strip())
                    continue
                if token.text.upper() in self.inv_auxiliaries:
                    result.append(self.inv_auxiliaries[
                        token.text.upper()].lower().strip())
                    continue
            result.append(token.text.strip())
        for i in range(len(result)):
            if result[i] == "i":
                result[i] = "I"
        result = ' '.join(result)  # concatenate the result
        # fix for situations like "I 'AM", "YOU 'LL"
        return result.replace(" '", "'")

    def fix_pronoun(self, subject):
        # TODO: Extend the context of this function
        """Pronoun fixer to handle situations like YOU and YOURSELF.

        Args:
            subject (str):  Subject that extracted from the user's
                            input/command.

        Returns:
            str:  Response.
        """
        if subject == "yourself":
            return "you"
        elif subject == "Yourself":
            return "You"
        elif subject == "YOURSELF":
            return "YOU"
        else:
            return subject

    def detect_pronoun(self, noun_chunk):
        """Determine whether user is talking about himself/herself or some
        other entity.

        Args:
            noun_chunk (str):  Noun phrase.

        Returns:
            ((str), (bool)):  Detected pronoun and boolean value
                              depends on the detection.
        """
        np_text = ""
        is_public = True
        doc = self.nlp(noun_chunk)
        for token in doc:
            if token.lemma_ == "-PRON-":
                # personal pronoun -> user-specific (not public) fact
                np_text += ' ' + token.text.lower()
                is_public = False
            else:
                np_text += ' ' + token.text
        return np_text.strip(), is_public

    def upper_capitalize(self, array):
        """Return capitalized and uppercased versions of the strings inside
        the given array.

        Args:
            array ((list) of (str)s):  List of strings.

        Returns:
            (list) of (str)s:  List of strings.
        """
        result = []
        for word in array:
            result.append(word)
            result.append(word.capitalize())
            result.append(word.upper())
        return result

    def clean(self, com):
        """Return a version of user's command that cleaned from punctuations,
        symbols, etc.

        Args:
            com (str):  User's command.

        Returns:
            str:  Cleaned version of user's command.
        """
        doc = self.nlp(com)
        for token in doc:
            if token.pos_ in ["PUNCT", "SYM"]:
                # NOTE(review): this replaces token.tag_ (the POS tag string),
                # not token.text — looks suspicious but kept as-is; confirm
                # the intent against the spaCy token attributes used upstream.
                com = com.replace(token.tag_, '')
        return com
def remove_test(db_file='db.json'):
    """Delete every document whose 'items' field is below 40."""
    database = TinyDB(db_file)
    below_forty = Query().items < 40
    database.remove(below_forty)
# Proceed if user user has enough money if subtotal <= balance: # Decrement stock for product in cart.values(): productDB.update({ "quantity": product["quantity"] - product["amount"] }, doc_ids=[product["id"]]) # Update Balance if paymentType == "card": accountDB.update({ "balance": round(newBalance, 2) }, doc_ids=[1]) # Delete coupon if coupon: couponDB.remove(doc_ids=[couponID]) # Log trasaction transactionID = transationDB.insert({ "timestamp": str(date.today()), "subtotal": round(response["subtotal"], 2), "coupon": coupon, "discount": round(subtotal * (int(coupon[4:]) / 100) if coupon else 0, 2), "cart": cart, "change": round(newBalance, 2) }) socket_client.send(pickle.dumps({ "success": True, "balance": round(newBalance, 2), "products": productDB.all(),
class GCodeRenderPlugin(octoprint.plugin.StartupPlugin,
                        octoprint.plugin.SettingsPlugin,
                        octoprint.plugin.EventHandlerPlugin,
                        octoprint.plugin.BlueprintPlugin
                        ):
    def initialize(self):
        """Set up the render queue, database and worker thread."""
        # Because we use last modified, make sure we only get integers
        # NOTE(review): os.stat_float_times was removed in Python 3.12;
        # this plugin targets Python 2 (see Queue.Queue below).
        os.stat_float_times(False)

        # The actual render jobs
        self.renderJobs = Queue.Queue()

        # Prepare lock for database access
        self.dbLock = threading.Lock()

        self.preview_extension = "png"

        # Initialize tinydb
        self._prepareDatabase()

        # Cleanup the database and previews folder
        self.cleanup()

        # Begin watching for render jobs
        self._start_render_thread()

        # Fill the queue with any jobs we may have missed
        self._updateAllPreviews()

    def _prepareDatabase(self):
        """Open (or create) the previews TinyDB under the lock."""
        # Using `with` so the lock is released even if TinyDB raises.
        with self.dbLock:
            self.previews_database_path = os.path.join(
                self.get_plugin_data_folder(), "previews.json")
            self.previews_database = TinyDB(self.previews_database_path)
            # underscore for blueprintapi compatability
            self._previews_query = Query()

    def _updateAllPreviews(self, subFolder=None):
        """
        Reads the entire preview database, checks if there are any outdated
        previews (last modified of preview is before last modified of gcode
        file) and updates these.
        """
        current_folder = self._settings.global_get_basefolder('uploads')
        if subFolder:
            current_folder = os.path.join(current_folder, subFolder)
        self._logger.debug(
            'Scanning folder {0} for render jobs'.format(current_folder))
        for entry in os.listdir(current_folder):
            entry_path = os.path.join(current_folder, entry)
            entry_rel_path = entry
            if subFolder:
                entry_rel_path = subFolder + '/' + entry
            if os.path.isfile(entry_path):
                file_type = octoprint.filemanager.get_file_type(entry_rel_path)
                if file_type:
                    # BUGFIX: was `is "machinecode"` — identity comparison with
                    # a string literal only works by CPython interning accident.
                    if file_type[0] == "machinecode":
                        self._updatePreview(entry_path, entry_rel_path)
            else:
                # entry is a directory: recurse into it
                self._updateAllPreviews(entry_rel_path)

    def _updatePreview(self, path, filename):
        """
        Checks if the preview is up to date with the gcode file (based on last
        modified) and re-renders if necessary.
        """
        with self.dbLock:
            db_entry = self.previews_database.get(
                self._previews_query.path == path)

        modtime = os.path.getmtime(path)

        if db_entry is None or db_entry["modtime"] != modtime \
                or not os.path.exists(db_entry["previewPath"]):
            self.render_gcode(path, filename, modtime)

    def cleanup(self):
        """
        Loop through database, remove items not found in upload or preview
        folder; then remove preview images that have no database record.
        """
        # `with` guarantees the lock is released on any exception (the manual
        # acquire/release pair leaked the lock if a filesystem call raised).
        with self.dbLock:
            db_entries = self.previews_database.all()
            for db_entry in db_entries:
                if not os.path.exists(db_entry["previewPath"]) or \
                        not os.path.exists(db_entry["path"]):
                    # NOTE(review): eids/.eid is the legacy TinyDB <3 API.
                    self.previews_database.remove(eids=[db_entry.eid])
                    self._logger.debug(
                        "Removed from preview database: %s"
                        % db_entry["filename"])

            # Loop through images, remove items not found in db
            image_folder = self._get_image_folder()
            for entry in os.listdir(image_folder):
                entry_path = os.path.join(image_folder, entry)
                if entry_path.endswith(self.preview_extension) and \
                        not self.previews_database.contains(
                            self._previews_query.previewPath == entry_path):
                    try:
                        os.remove(entry_path)
                        self._logger.debug("Removed preview %s" % entry_path)
                    except Exception:
                        self._logger.debug(
                            "Could not remove preview %s" % entry_path)

    def on_event(self, event, payload, *args, **kwargs):
        """Enqueue a render job whenever a gcode file is uploaded."""
        if event == Events.UPLOAD:
            if "path" in payload:
                gcodePath = os.path.join(
                    self._settings.global_get_basefolder('uploads'),
                    payload["path"])
                self.render_gcode(gcodePath, payload["name"])
            else:
                self._logger.debug(
                    "File uploaded, but no metadata found to create "
                    "the gcode preview")

    def is_blueprint_protected(self):
        # Previews are public; no API key required.
        return False

    def get_settings_defaults(self):
        return dict(
            maxPreviewFileSize=52428800  # 50 MB
        )

    def render_gcode(self, path, filename, modtime=None):
        """
        Adds a render job to the render queue
        """
        if not os.path.exists(path):
            self._logger.debug(
                "Could not find file to render: {0}".format(path))
            return

        if not modtime:
            modtime = os.path.getmtime(path)

        # TODO: Some error handling; or return a dummy preview
        maxFileSize = self._settings.get_int(["maxPreviewFileSize"])
        if maxFileSize > 0 and os.path.getsize(path) > maxFileSize:
            self._logger.warn(
                "GCode file exceeds max preview file size: %s" % filename)
            return

        # Add the job to the render queue
        self.renderJobs.put(
            {"path": path, "filename": filename, "modtime": modtime})
        self._logger.debug("Render job enqueued: %s" % filename)

    @octoprint.plugin.BlueprintPlugin.route(
        "/previewstatus/<path:filename>", methods=["GET"])
    def previewstatus(self, filename):
        """
        Allows to check whether a preview is available for a gcode file.

        GET /previewstatus/<filename>
        """
        # TODO: Add support for other statuses, such as 'rendering failed',
        # 'gcode too big', 'queued for rendering' etc
        if not filename:
            response = make_response('Invalid filename', 400)
        else:
            # First check in the database whether a preview is available
            self._logger.debug("Retrieving preview status for %s" % filename)
            with self.dbLock:
                db_entry = self.previews_database.get(
                    self._previews_query.filename == filename)

            if not db_entry:
                response = make_response(jsonify({'status': 'notfound'}), 200)
            elif os.path.exists(db_entry["previewPath"]):
                response = make_response(jsonify({
                    'status': 'ready',
                    'previewUrl': db_entry["previewUrl"]
                }), 200)
            else:
                self._logger.debug(
                    "Preview file not found: %s" % db_entry["previewPath"])
                response = make_response(jsonify({'status': 'notfound'}), 200)

        return self._make_no_cache(response)

    @octoprint.plugin.BlueprintPlugin.route(
        "/preview/<path:previewFilename>", methods=["GET"])
    def preview(self, previewFilename):
        """
        Retrieves a preview for a gcode file.

        Returns 404 if preview was not found

        GET /preview/file.gcode
        """
        if not previewFilename:
            response = make_response('Invalid filename', 400)
        else:
            self._logger.debug("Retrieving preview %s" % previewFilename)
            # Check the database for existing previews
            with self.dbLock:
                db_entry = self.previews_database.get(
                    self._previews_query.previewFilename == previewFilename)

            # Return the preview file if it is found, otherwise 404
            if not db_entry or not os.path.exists(db_entry["previewPath"]):
                response = make_response('No preview ready', 404)
            else:
                response = send_file(db_entry["previewPath"])

        return response

    @octoprint.plugin.BlueprintPlugin.route("/allpreviews", methods=["GET"])
    def getAllPreviews(self):
        """
        Gets a list of all gcode files for which a preview is available.
        Useful for initial display of a gcode file list. Removes the need for
        calling previewstatus a lot of times.
        """
        with self.dbLock:
            db_entries = self.previews_database.all()

        previews = []
        for db_entry in db_entries:
            if os.path.exists(db_entry["previewPath"]):
                previews.append({
                    "filename": db_entry["filename"],
                    "previewUrl": db_entry["previewUrl"]
                })

        response = make_response(jsonify({"previews": previews}))
        return self._make_no_cache(response)

    def _start_render_thread(self):
        """Start the daemon thread that watches the render job queue."""
        t = threading.Thread(target=self._render_gcode_watch)
        t.setDaemon(True)
        t.start()

    def _initialize_parser(self):
        """Configure the gcodeparser (throttling, bed, camera, colors)."""
        # Read throttling settings from OctoPrint
        throttling_duration = 0
        throttling_interval = 0
        # Make an exception when we're debugging on Windows
        if sys.platform != "win32":
            # OctoPrint default 10ms
            default_throttle = self._settings.global_get_float(
                ["gcodeAnalysis", "throttle_normalprio"])
            # Old OctoPrint versions don't have this setting. Default to 10ms
            if default_throttle is None:
                default_throttle = 0.01
            throttling_duration = int(1000 * default_throttle)

            default_throttle_lines = self._settings.global_get_int(
                ["gcodeAnalysis", "throttle_lines"])
            # Old OctoPrint versions don't have this setting.
            # Default to 100 lines
            if default_throttle_lines is None:
                default_throttle_lines = 100
            # OctoPrint default 100, multiply by 50 because we're at C speed,
            # and we're crunching more efficiently
            throttling_interval = 50 * default_throttle_lines

        initialized = False
        try:
            initialized = gcodeparser.initialize(
                width=250, height=250,
                throttling_interval=throttling_interval,
                throttling_duration=throttling_duration,
                logger=self._logger)
        except Exception:
            self._logger.exception("Exception while initializing gcodeparser")
            return False

        if initialized:
            try:
                gcodeparser.set_print_area(x_min=-37, x_max=328,
                                           y_min=-33, y_max=317,
                                           z_min=0, z_max=205)
                gcodeparser.set_camera(target="part",
                                       distance=(-300, -300, 150))
                gcodeparser.set_background_color((1.0, 1.0, 1.0, 1.0))
                gcodeparser.set_bed_color((0.75, 0.75, 0.75, 1.0))
                gcodeparser.set_part_color(
                    (67.0 / 255.0, 74.0 / 255.0, 84.0 / 255.0, 1.0))
            except Exception:
                self._logger.exception("Exception while configuring gcodeparser")
                return False

        return True

    def _render_gcode_watch(self):
        """
        The actual rendering thread. Monitors the render queue, and initiates
        the render job.
        """
        # It is important we initialize the gcoderender on this thread
        # (for the drawing context)
        initialized = self._initialize_parser()
        if not initialized:
            self._logger.error("Couldn't initialize gcodeparser")
            return

        while True:
            job = self.renderJobs.get()  # blocks until a job is available
            self._logger.debug("Job found: {0}".format(job['filename']))
            t0 = time.time()
            self._render_gcode_worker(job['path'], job['filename'],
                                      job['modtime'])
            t1 = time.time()
            # BUGFIX: the message hard-coded "(unknown)" and silently ignored
            # its `filename` keyword argument.
            self._logger.info(
                "Rendered preview for {filename} in {t:0.0f} s".format(
                    filename=job['filename'], t=(t1 - t0)))
            self.renderJobs.task_done()

    def _render_gcode_worker(self, path, filename, modtime):
        """
        Renders a preview for a gcode file and inserts a record into the
        preview database.
        """
        if not octoprint.filemanager.valid_file_type(path, type="gcode"):
            self._logger.debug('Not a valid file type: %s' % path)
            return

        if not os.path.exists(path):
            self._logger.debug('File doesn\'t exist: %s' % path)
            return

        if filename.startswith("."):
            # TODO: Perform a more comprehensive hidden file check
            self._logger.debug('Hidden file: %s' % path)
            return

        # Notify the client about the render
        self._send_client_message("gcode_preview_rendering",
                                  {"filename": filename})

        # Get a filename for the preview. By including modtime, the previews
        # may be cached by the browser
        imageDest = self._get_imagepath(filename, modtime)
        self._logger.debug("Image path: {}".format(imageDest["path"]))

        # This is where the magic happens
        self._logger.debug("Begin rendering")
        # BUGFIX: `success` must be pre-initialized, otherwise an exception in
        # render_gcode left it unbound and `if success:` raised NameError.
        success = False
        try:
            success = gcodeparser.render_gcode(path, imageDest["path"])
        except Exception as e:
            self._logger.debug("Error in Gcodeparser: %s" % e.message)

        if success:
            # Rendering succeeded
            self._logger.debug("Render complete: %s" % filename)
            url = '/plugin/gcoderender/preview/%s' % imageDest["filename"]
        else:
            # Rendering failed.
            # TODO: set url and path to a failed-preview-image
            self._logger.warn("Render failed: %s" % filename)
            return

        # Query the database for any existing records of the gcode file.
        # Then, update or insert record
        with self.dbLock:
            db_entry = self.previews_database.get(
                self._previews_query.path == path)
            if not db_entry:
                self.previews_database.insert({
                    "filename": filename,
                    "path": path,
                    "modtime": modtime,
                    "previewUrl": url,
                    "previewFilename": imageDest["filename"],
                    "previewPath": imageDest["path"]
                })
            else:
                try:
                    os.remove(db_entry["previewPath"])
                except Exception:
                    self._logger.debug(
                        "Could not delete preview %s"
                        % db_entry["previewPath"])
                self.previews_database.update({
                    "modtime": modtime,
                    "previewUrl": url,
                    "previewPath": imageDest["path"],
                    "previewFilename": imageDest["filename"]
                }, self._previews_query.path == path)

        # Notify client the preview is ready
        self._send_client_message("gcode_preview_ready",
                                  {"filename": filename, "previewUrl": url})

    def _make_no_cache(self, response):
        """
        Helper method to set no-cache headers. Not used anymore, as including
        modtime in filename allows browser caching
        """
        response.headers["Cache-Control"] = \
            "no-store, no-cache, must-revalidate, post-check=0, " \
            "pre-check=0, max-age=0"
        response.headers["Pragma"] = "no-cache"
        response.headers["Expires"] = "-1"
        return response

    def _get_image_folder(self):
        """
        Gets the folder to save the previews to
        """
        return self._settings.get_plugin_data_folder()

    def _get_imagepath(self, filename, modtime=None):
        """
        Creates a filename for the preview.

        Returns both filename and (full) path
        """
        # Strip any subfolders and find the basename of the file
        _, tail = os.path.split(filename)
        name, _ = os.path.splitext(tail)

        images_folder = self._get_image_folder()
        if not modtime:
            # NOTE(review): time.clock() is deprecated (removed in py3.8);
            # fine on this plugin's py2 target but would need time.time() on 3.
            modtime = time.clock()

        new_filename = "{0}_{1}.{2}".format(name, modtime,
                                            self.preview_extension)
        image_path = os.path.join(images_folder, new_filename)

        return dict(path=image_path, filename=new_filename)

    def _send_client_message(self, message_type, data=None):
        """
        Notify the client
        """
        self._logger.debug(
            "Sending client message with type: {type}, and data: {data}".format(
                type=message_type, data=data))
        self._plugin_manager.send_plugin_message(
            self._identifier, dict(type=message_type, data=data))
class Client(object):
    """Client library for controlling a HPG Microwave source.

    :param logger: Use an existing logger to attach the child logger to.
    :type logger: class:`logging.Logger`
    :param settings: A dictionary holding all settings to be updated. Defaults to ``None``
    :type settings: dict, optional
    """

    def __init__(self, *args, **kwargs):
        """Class constructor """
        self._configure_logger()
        self.update_settings()                   # seeds self._settings with the defaults
        self.__is_available = False              # last known device reachability
        self.__is_locked = False                 # software interlock; @check_lock setters no-op while True
        self._presets = TinyDB('presets.json')   # preset store in the current working directory
        self.__is_available_last = 0             # timestamp of the last availability probe
        self.__cache_age = 0                     # timestamp of the last cache refresh
        self.__reconnect_time = 0                # currently unused (see commented-out code in __connect)

    @classmethod
    def get_defaults(cls):
        """Return the default class values as a dict

        :return: A dict holding all class default values
        :rtype: dict

        .. note:: The structure of the return dict is identical to the dict
            structure necessary in :meth:`Client.updateSettings()`.
        """
        # NOTE(review): 'frequency' defaults to a nested dict here, but most
        # of the class reads/writes self._settings['frequency'] as a plain
        # int (frequency property, update_settings) — confirm intended.
        return {
            'connection': {
                'host': '192.168.0.111',
                'port': 5025,
                'delay': 0.01
            },
            'mode': 'PWM',
            'frequency': {
                'current': 245000
            },
            'power': {
                'current': 1
            },
            'rf': 0,
            'ontime': {
                'current': 10000
            },
            'offtime': {
                'current': 1000000
            },
            'loglevel': 'DEBUG',
            'cache': False,
            'id': None,
            'serial': None,
            'version': None
        }

    def update_settings(self, settings=None):
        """Overwrite the local settings with new values and update internal components

        :param settings: A dictionary holding all settings to be updated. Default is ``{}``
        :type settings: dict, optional

        **Example 1**

        .. code-block:: python
            :linenos:

            # Update device IP
            settings = {'connection': {'ip': '192.168.0.1'}}
            Client.updateSettings(settings)

        **Example 2**

        .. code-block:: python
            :linenos:

            # Update device port and frequency
            settings= {'connection': {'port': 1234}, 'frequency': 245000}
            Client.updateSettings(settings)

        .. note:: The dict in ``settings`` has the same structure as the dict
            returned by ``.getDefaults``
        """
        # Lazily initialize the settings dict on the first call (from __init__).
        self._settings = getattr(self, '_settings', Client.get_defaults())
        if settings is None:
            return False
        # Scalar settings are replaced wholesale ...
        for key in ['rf', 'frequency', 'mode']:
            self._settings[key] = settings.get(key, self._settings[key])
        # ... nested {'current': ...} dicts are merged key-by-key.
        for key in ['power', 'ontime', 'offtime']:
            self._settings[key].update(settings.get(key, {}))
        # NOTE(review): settings['connection'] is never merged into
        # self._settings['connection'], so the two assignments below re-read
        # the OLD stored host/port — confirm whether a merge is missing.
        if 'port' in settings.get('connection', {}):
            self.port = int(self._settings['connection']['port'])
        if 'host' in settings.get('connection', {}):
            self.host = self._settings['connection']['host']

    @property
    def ready(self):
        # The device is "ready" when it is reachable and every operating
        # parameter can be read (none of the getters returns None).
        if not self.available:
            return False
        for attr in ['ontime', 'offtime', 'frequency', 'power', 'mode']:
            if getattr(self, attr, None) is None:
                self.logger.info(f'Attribute {attr} is not set!')
                return False
        return True

    def preset_save(self, id=None, name=None):
        """Persist the current settings as a named preset in the TinyDB store.

        :param id: existing preset doc id to update; ``None`` inserts a new one.
        :param name: human-readable preset name (stored under ``_preset_name``).
        :return: the preset's doc id.
        """
        settings = {}
        settings.update(self._settings)
        settings['_preset_name'] = str(name)
        # Strip device-/connection-specific keys from the stored preset.
        for key in ['connection', 'id', 'version', 'serial', 'loglevel']:
            del settings[key]
        if id is None:
            id = self._presets.insert(settings)
            self.logger.info(f'Create new preset with ID {id}')
        else:
            # NOTE(review): the update path writes the *full* self._settings
            # (including connection etc.), not the trimmed copy built above,
            # and drops the preset name — confirm intended.
            self._presets.update(self._settings, doc_ids=[id])
            self.logger.info(f'Update preset {id}')
        return id

    def preset_delete(self, id):
        """Remove preset *id* from the store; log an error if it is missing."""
        try:
            self._presets.remove(doc_ids=[id])
        except KeyError:
            self.logger.error(f'Could not delete preset {id}')
        else:
            self.logger.info(f'Delete preset {id}')

    def preset_list(self, show_in_log=False):
        """Return all presets; optionally log one ``id - name`` line per entry."""
        all_presets = self._presets.all()
        if len(all_presets) == 0:
            self.logger.info('No presets in database')
        if show_in_log:
            for entry in all_presets:
                id, name = entry.doc_id, entry['_preset_name']
                self.logger.info(f'{id} - {name}')
        return all_presets

    @check_lock
    def preset_load(self, id):
        """Load preset *id* into the local settings and apply it to the device."""
        settings = self._presets.get(doc_id=id)
        if settings is not None:
            self.update_settings(settings=settings)
            self.apply()
            self.logger.info(f'Load preset {id}')
        else:
            # NOTE(review): missing f-prefix — this logs the literal '{id}'.
            self.logger.error('Could not find preset {id}')

    @check_lock
    def apply(self, safe_mode=True):
        """Push all cached settings to the device via the property setters.

        :param safe_mode: currently unused.
        """
        if not self.available:
            # Abort if device is not available
            return
        # Abort if at least one attribute is not set
        for attr in ['mode', 'rf', 'frequency', 'ontime', 'offtime', 'power']:
            if getattr(self, attr, None) is None:
                return
        for key in ['mode', 'rf', 'frequency']:
            setattr(self, key, self._settings[key])
        for key in ['ontime', 'offtime', 'power']:
            setattr(self, key, self._settings[key]['current'])
        self.logger.info('Apply settings to device')

    # @check_available
    def __load_from_device(self):
        """Load values from the device.

        This method loads all values from the device and checks them against
        the values stored in the cache. If the values differ the device is
        written back to the cached values.
        """
        # self.getIDN()
        if not self.__is_available:
            return
        # Temporarily disable caching so the property getters below actually
        # query the device instead of returning cached values.
        previous_cache_state = self.caching
        self.caching = False
        cached_power = self._settings['power']['current']
        current_power = self.power
        if cached_power is not None and current_power is not None:
            if getattr(self, 'power_step', None) is not None:
                cached_power *= self.power_step or 1
            if cached_power != current_power:
                # NOTE(review): power is only logged, not written back —
                # unlike mode/offtime/ontime/frequency below. Confirm intended.
                self.logger.warning(f'Generator changed power from {cached_power} to {current_power}')
        cached_mode = self._settings['mode']
        current_mode = self.mode
        if cached_mode is not None and current_mode is not None:
            if cached_mode != current_mode:
                self.logger.warning(f'Generator changed mode from {cached_mode} to {current_mode}')
                self.mode = cached_mode
        cached_offtime = self._settings['offtime']['current']
        current_offtime = self.offtime
        if current_offtime is not None and cached_offtime is not None:
            if cached_offtime != current_offtime:
                self.logger.warning(f'Generate changed offtime from {cached_offtime} to {current_offtime}')
                self.offtime = cached_offtime
        cached_ontime = self._settings['ontime']['current']
        current_ontime = self.ontime
        if current_ontime is not None and cached_ontime is not None:
            if cached_ontime != current_ontime:
                self.logger.warning(f'Generate changed ontime from {cached_ontime} to {current_ontime}')
                self.ontime = cached_ontime
        # NOTE(review): _settings['frequency'] may be the defaults dict (see
        # get_defaults) while self.frequency returns an int — confirm the
        # comparison below compares like with like.
        cached_frequency = self._settings['frequency']
        current_frequency = self.frequency
        if current_frequency is not None and cached_frequency is not None:
            if cached_frequency != current_frequency:
                self.logger.warning(f'Generator changed frequency from {cached_frequency} to {current_frequency}')
                self.frequency = cached_frequency
        self.caching = previous_cache_state

    def lock(self):
        """Engage the software interlock; @check_lock setters become no-ops."""
        self.logger.info('Lock device')
        self.__is_locked = True

    def unlock(self):
        """Release the software interlock."""
        self.logger.info('Unlock device')
        self.__is_locked = False

    @property
    def locked(self):
        # Current interlock state (read-only).
        return self.__is_locked

    def _configure_logger(self, name=None, level=None, parent=None):
        """Get child of a given logger or setup standalone default logger

        :param logger: Use an existing logger to attach the child logger to.
        :type client: class:`logging.Logger`
        :param name: Set a specific name for the child logger. Defaults to ``__class__.__name__``
        :type name: str, optional
        :param level: Set the loglevel for the child logger. Defaults to ``DEBUG``
        :type level: str, optional
        """
        level = 'DEBUG' if level is None else level
        name = self.__class__.__name__ if name is None else name
        if parent is None:
            self.logger = logging.getLogger(name)
        else:
            self.logger = logging.getLogger(parent).getChild(name)
        self.logger.setLevel(getattr(logging, level))

    def __connect(self, timeout=0.5):
        """Connect a socket.

        :param timeout: Set the socket timeout in seconds. Defaults to ``0.5``.
        :type timeout: float, optional
        :raises DeviceNotAvailable: if the socket cannot be created or connected.
        """
        # Throttle: give the device a short breather between commands.
        time.sleep(self._settings['connection']['delay'])
        # if self.__reconnect_time > time.time():
        #     return
        try:
            self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.s.settimeout(timeout)
        except socket.error as msg:
            # NOTE(review): unpacking an OSError with *msg into format() will
            # itself raise on Python 3 — confirm this branch is reachable.
            self.logger.error('Failed to create and connect. \
                Error code: {}, Error message: {}'.format(*msg))
            raise DeviceNotAvailable
        except Exception as e:
            self.logger.error(f'Socket exception: {e}')
            raise DeviceNotAvailable
        else:
            host = str(self.host)
            try:
                self.s.connect((host, self.port))
            except socket.timeout:
                self.logger.error(f'Timeout while connecting to {host}:{self.port}')
                raise DeviceNotAvailable
            except socket.error:
                self.logger.error(f'Error connecting to {host}:{self.port}')
                raise DeviceNotAvailable
            else:
                # self.available = True
                self.logger.debug(f'Connected to {host}:{self.port}')

    def __disconnect(self):
        """Close the socket.
        """
        self.s.close()

    def __set_command(self, cmd, encoding='utf-8'):
        """Send a command and do not wait for a response

        :param cmd: Command to send
        :type cmd: str
        :param encoding: Encode the command with this encoding. Defaults to `utf-8`.
        :type encoding: str, optional
        :raises DeviceNotAvailable: on send timeout or socket error.
        """
        try:
            self.__connect()
        except DeviceNotAvailable:
            self.available = False
            return
        else:
            self.available = True
        device_available = False
        try:
            # SCPI commands are CRLF-terminated.
            self.s.sendall(f'{cmd}\r\n'.encode(encoding))
        except socket.timeout:
            self.logger.error(f'SET-Timeout while connecting to {self.host}:{self.port}')
            raise DeviceNotAvailable
        except socket.error:
            self.logger.error(f'Error connecting to {self.host}:{self.port}')
            raise DeviceNotAvailable
        else:
            device_available = True
            self.logger.debug(f'Send "{cmd}"')
        self.available = device_available
        self.__disconnect()

    def __get_command(self, cmd, encoding='utf-8', buffer_length=4096):
        """Send a query and wait for a response.

        :param cmd: Command to send
        :type cmd: str
        :param encoding: Encode the command and decode the response with this encoding. Defaults to `utf-8`
        :type encoding: str, optional
        :param buffer_length: Socket receive buffer length in bytes.
        :type buffer_length: int, optional
        :raises DeviceNotAvailable: when no response could be read.
        """
        try:
            self.__connect()
        except DeviceNotAvailable:
            self.available = False
            raise DeviceNotAvailable
        else:
            self.available = True
        response = None
        try:
            self.s.sendall('{}\r\n'.format(cmd).encode(encoding))
            self.logger.debug('Send "{}"'.format(cmd))
        except socket.timeout:
            self.logger.error(f'GET-Timeout while connecting to {self.host}:{self.port}')
        except socket.error:
            self.logger.error(f'Error connecting to {self.host}:{self.port}')
        else:
            try:
                # Strip the CRLF terminator from the device reply.
                response = self.s.recv(buffer_length).decode(encoding).rstrip('\r\n')
                self.logger.debug(f'Receive "{response}"')
            except socket.timeout:
                self.logger.error(f'Timeout while connecting to {self.host}:{self.port}')
            except socket.error:
                self.logger.error(f'Error connecting to {self.host}:{self.port}')
        self.__disconnect()
        if response is not None:
            self.available = True
            return response
        self.available = False
        raise DeviceNotAvailable

    @property
    def host(self):
        # Device hostname/IP; changing it invalidates the availability cache.
        return self._settings['connection']['host']

    @host.setter
    def host(self, value):
        self._settings['connection']['host'] = value
        self.__is_available_last = 0
        self.__cache_age = 0

    @property
    def port(self):
        # Device TCP port; changing it invalidates the availability cache.
        return self._settings['connection']['port']

    @port.setter
    def port(self, value):
        self._settings['connection']['port'] = value
        self.__is_available_last = 0
        self.__cache_age = 0

    @property
    def caching(self):
        # When True, property getters return cached values instead of querying.
        return self._settings['cache']

    @caching.setter
    def caching(self, value):
        self._settings['cache'] = bool(value)

    @property
    def serial(self):
        # Device serial number as reported by *IDN? (see getIDN).
        return self._settings['serial']

    @serial.setter
    def serial(self, value):
        self._settings['serial'] = value

    @property
    def id(self):
        # Device identification string as reported by *IDN?.
        return self._settings['id']

    @id.setter
    def id(self, value):
        self._settings['id'] = value

    @property
    def version(self):
        # Device firmware version as reported by *IDN?.
        return self._settings['version']

    @version.setter
    def version(self, value):
        self._settings['version'] = value

    @property
    def mode(self):
        """Get the operation mode.

        :getter: Return the operation mode. Returns the cached value if caching is enabled.
        :setter: Set a new operation mode and save it in the settings.
            The value is only applied if it differs from the actual setting.
        :type: str

        **Example**

        .. code-block:: python
            :linenos:

            Client.mode = 'CW' # Set the mode to CW

        .. note:: Valid operation modes are `CW`, `PWM`, `NWA` and `NWA-PWM`.
            All other values are ignored.
        """
        cached_mode = self._settings['mode']
        if self.caching and self._settings['mode'] is not None:
            return cached_mode
        try:
            self._settings['mode'] = self.__get_command('CONF:MOD?').replace('"', '')
        except ValueError:
            self.logger.error('Received mode was faulty. Set default')
            # NOTE(review): this assigns self.frequency with the default
            # *mode* string — presumably self.mode was intended; confirm.
            self.frequency = Client.get_defaults()['mode']
        except DeviceNotAvailable:
            self.logger.error('Could not read mode')
            self._settings['mode'] = None # Client.MODE
            return None
        return self._settings['mode']

    @mode.setter
    @check_lock
    def mode(self, value):
        """Only set the mode if `value` is set to a valid operation mode and
        if `value` differs from the current setting.
        """
        try:
            value = str(value)
        except ValueError:
            raise ValueError
        if value not in ['CW', 'PWM', 'NWA', 'NWA-PWM']:
            raise UnkownCommandException(f'Unknown command {value}')
        if value == self._settings['mode'] and self.caching:
            return
        try:
            self.__set_command(f'CONF:MOD {value}')
        except DeviceNotAvailable:
            self.logger.warning('Could not set mode')
        else:
            self.logger.info(f'Set mode to {value}')
            self._settings['mode'] = value

    @property
    def frequency(self):
        """Get the output frequency in multiples of 10kHz.

        :getter: Return the operation frequency. Returns the cached value if caching is enabled.
        :setter: Set a new operation frequency and save it in the settings.
            The value is only applied if it differs from the actual setting.
        :type: int
        :raises: ValueError

        **Example**

        .. code-block:: python
            :linenos:

            Client.frequency = 245000 # Set the frequency to 2,45GHz

        .. note:: The operating frequency is returned and set in multiples of 10kHz.
            It is not checked whether ``value`` fulfills this requirement or the
            limits from the devices datasheet.
        """
        cached_frequency = self._settings['frequency']
        if self.caching and cached_frequency is not None:
            return cached_frequency
        try:
            self._settings['frequency'] = int(self.__get_command('CONF:FREQ?'))
        except ValueError:
            self.logger.error('Received frequency was faulty. Set default')
            # NOTE(review): get_defaults()['frequency'] is a dict, not an int.
            self.frequency = Client.get_defaults()['frequency']
        except DeviceNotAvailable:
            self._settings['frequency'] = None
            self.logger.error('Could not read frequency')
            return None
        return self._settings['frequency']

    @frequency.setter
    @check_lock
    def frequency(self, value):
        """Only set frequency if `value` differs from the current setting.
        """
        try:
            value = int(float(value))
        except ValueError:
            value = -1
        if value < 0:
            raise ValueError('Frequency must be positive and integer')
        if value == self._settings['frequency'] and self.caching:
            # Only set if value is different or query
            return
        try:
            self.__set_command(f'CONF:FREQ {value}')
        except DeviceNotAvailable:
            self.logger.warning('Could not set frequency')
        else:
            self.logger.info(f'Set frequency to {value}')
            self._settings['frequency'] = value

    @property
    def fsweep_frequency_start(self):
        """The first frequency of the frequency sweep

        :getter: Return the start frequency of the sweep in multiples of 10KHz.
        :setter: Set the start frequency of the sweep in multiples of 10KHz.
        :type: float
        :raises ValueError: if the value is negative or not numeric.
        """
        return float(self.__get_command('CONFigure:FSWEEp:FSTART?'))

    @fsweep_frequency_start.setter
    def fsweep_frequency_start(self, value):
        # Manual interlock check (this setter does not use @check_lock).
        if self.__is_locked:
            return
        try:
            value = int(float(value))
        except ValueError:
            value = -1
        if value < 0:
            raise ValueError('Frequency must be positive and integer')
        else:
            self.__set_command('CONFigure:FSWEEp:FSTART {}'.format(value))

    @property
    def fsweep_frequency_step(self):
        """The frequency step of the frequency sweep

        :getter: Return the frequency step of the sweep in multiples of 10KHz.
        :setter: Set the frequency step of the sweep in multiples of 10KHz.
        :type: float
        :raises ValueError: if the value is negative or not numeric.
        """
        return float(self.__get_command('CONFigure:FSWEEp:FSTEP?'))

    @fsweep_frequency_step.setter
    def fsweep_frequency_step(self, value):
        if self.__is_locked:
            return
        try:
            value = int(float(value))
        except ValueError:
            value = -1
        if value < 0:
            raise ValueError('Frequency must be positive and integer')
        else:
            self.__set_command('CONFigure:FSWEEp:FSTEP {}'.format(value))

    @property
    def fsweep_frequency_stop(self):
        """The last frequency of the frequency sweep

        :getter: Return the last frequency of the sweep in multiples of 10KHz.
        :setter: Set the last frequency of the sweep in multiples of 10KHz.
        :type: float
        :raises ValueError: if the value is negative or not numeric.
        """
        return float(self.__get_command('CONFigure:FSWEEp:FSTOP?'))

    @fsweep_frequency_stop.setter
    def fsweep_frequency_stop(self, value):
        if self.__is_locked:
            return
        try:
            value = int(float(value))
        except ValueError:
            value = -1
        if value < 0:
            raise ValueError('Frequency must be positive and integer')
        else:
            self.__set_command('CONFigure:FSWEEp:FSTOP {}'.format(value))

    def fsweep_start(self):
        """Start the frequency sweep.

        .. note:: The frequency sweep will only start if there is no other
            frequency sweep or sequence running on the device.
        """
        if self.fsweep_status:
            self.logger.error('A frequency sweep is already running.')
        elif self.sequence_status:
            self.logger.error('A sequence is already running.')
        else:
            self.logger.info('Start the frequency sweep')
            self.__set_command('CONTrol:FSWEEp:START')

    @property
    def fsweep_status(self):
        """Query if a frequency sweep is already running

        :getter: Check if a frequency sweep is currently running on the device
        :type: bool

        .. note:: (review) ``bool()`` of any non-empty reply string is True —
            a reply of ``"0"`` would still read as running; confirm the
            device's reply format.
        """
        return bool(self.__get_command('CONTrol:FSWEEp:STATus?'))

    @property
    def sequence_status(self):
        """Query if a sequence is already running

        :getter: Check if a sequence is currently running on the device
        :type: bool

        .. note:: (review) same truthiness caveat as :attr:`fsweep_status`.
        """
        return bool(self.__get_command('CONTrol:SEQuence:STATus?'))

    def fsweep_stop(self):
        """Stop the frequency sweep.
        """
        self.logger.info('Stop the frequency sweep')
        self.__set_command('CONTrol:FSWEEp:STOP')

    @property
    def power(self):
        """Get the output power in multiples of 10W.

        :getter: Return the output power. Returns the cached value if caching is enabled.
        :setter: Set a new output power and save it in the settings.
            The value is only applied if it differs from the actual setting.
        :type: int

        **Example**

        .. code-block:: python
            :linenos:

            Client.power = 2 # Set the output power to 20W

        .. note:: The output power is returned and set in multiples of 10W.
            It is not checked whether ``value`` fulfills this requirement or the
            limits from the devices datasheet.
        """
        if self.caching and self._settings['power']['current'] is not None:
            return self._settings['power']['current']
        try:
            self._settings['power']['current'] = float(self.__get_command('CONF:POW?'))
        except ValueError:
            self.logger.error('Received power was faulty. Set default')
            self.power = Client.get_defaults()['power']['current']
        except DeviceNotAvailable:
            self.logger.warning('Device not available, could not read power.')
            self._settings['power']['current'] = None
            return None
        return self._settings['power']['current']

    @power.setter
    @check_lock
    def power(self, value):
        """Only set power if `value` differs from the current setting.
        """
        try:
            value = float(value)
        except ValueError:
            value = -1
        if value < 0:
            raise ValueError('Power must be positive and int')
        if value == self._settings['power']['current'] and self.caching:
            return
        try:
            self.__set_command(f'CONF:POW {value}') # Only set if value is different or query
        except DeviceNotAvailable:
            self.logger.warning('Device not available, could not set power.')
            self.available = False
        else:
            self.logger.info(f'Set power to {value}W')
            self._settings['power']['current'] = value

    @property
    def rf(self):
        """Get the rf output state.

        :getter: Return the output state. Returns the cached value if caching is enabled.
        :setter: Set a new output state and save it in the settings.
            Set ``0`` for `Off` and ``1`` for `On`.
            The value is only applied if it differs from the actual setting.
        :type: int

        **Example**

        .. code-block:: python
            :linenos:

            Client.rf = 1 # Enable the RF output
        """
        if self.caching and self._settings['rf'] is not None:
            return self._settings['rf']
        try:
            self._settings['rf'] = int(self.__get_command('CONT:RF?'))
        except DeviceNotAvailable:
            self.logger.warning('Device not available, could not read RF state.')
            self._settings['rf'] = None
            return None
        except ValueError:
            # Fail safe: a garbled reply disables the RF output.
            self.logger.error('Received RF state was faulty. Disable RF')
            self.rf = 0
        return self._settings['rf']

    @rf.setter
    @check_lock
    def rf(self, value):
        """Only set rf if `value` differs from the current setting.
        """
        try:
            value = int(float(value))
        except ValueError:
            value = -1
        if value not in [0, 1]:
            raise ValueError('RF must be 0 or 1')
        if value == self._settings['rf'] and self.caching:
            # Only set if value is different or query
            return
        try:
            self.__set_command(f'CONT:RF {value}')
        except DeviceNotAvailable:
            self.logger.warning('Device not available, could not set RF state.')
            self.available = False
        else:
            self.logger.info(f'Set RF to {value}')
            self._settings['rf'] = value

    @property
    def ontime(self):
        """Get the PWM ontime in ns.

        :getter: Return the ontime. Returns the cached value if caching is enabled.
        :setter: Set the ontime in ns.
            The value is only applied if it differs from the actual setting.
        :type: int

        **Example**

        .. code-block:: python
            :linenos:

            Client.ontime = 10000 # Set the PWM ontime to 10µs
        """
        cached_ontime = self._settings['ontime']['current']
        default_ontime = Client.get_defaults()['ontime']['current']
        if self.caching and cached_ontime is not None:
            return cached_ontime
        try:
            self._settings['ontime']['current'] = int(self.__get_command('CONF:PWM:ON?'))
        except ValueError:
            self.logger.error('Received ontime was faulty. Set default')
            self.ontime = default_ontime
        except DeviceNotAvailable:
            self._settings['ontime']['current'] = None
            self.logger.error('Could not read ontime')
            return None
        return self._settings['ontime']['current']

    @ontime.setter
    @check_lock
    def ontime(self, value):
        """Only set ontime if `value` differs from the current setting.
        """
        try:
            value = int(float(value))
        except ValueError:
            value = -1
        if value < 0:
            raise ValueError('Ontime must be positive and int')
        elif value == self._settings['ontime']['current'] and self.caching:
            return
        try:
            self.__set_command(f'CONF:PWM:ON {value}')
        except DeviceNotAvailable:
            self.available = False
            self.logger.error('Could not set ontime')
        else:
            self.logger.info(f'Set ontime to {value}ns')
            self._settings['ontime']['current'] = value

    @property
    def offtime(self):
        """Get the PWM offtime in ns.

        :getter: Return the offtime. Returns the cached value if caching is enabled.
        :setter: Set the offtime in ns.
            The value is only applied if it differs from the actual setting.
        :type: int

        **Example**

        .. code-block:: python
            :linenos:

            Client.offtime = 10000 # Set the PWM offtime to 10µs
        """
        cached_offtime = self._settings['offtime']['current']
        default_offtime = Client.get_defaults()['offtime']['current']
        if self.caching and cached_offtime is not None:
            return cached_offtime
        try:
            self._settings['offtime']['current'] = int(self.__get_command('CONF:PWM:OFF?'))
        except ValueError:
            self.logger.error('Received offtime was faulty. Set default')
            self.offtime = default_offtime
        except DeviceNotAvailable:
            self._settings['offtime']['current'] = None
            self.logger.error('Could not read offtime')
            return None
        return self._settings['offtime']['current']

    @offtime.setter
    @check_lock
    def offtime(self, value):
        """Only set offtime if `value` differs from the current setting.
        """
        try:
            value = int(float(value))
        except ValueError:
            value = -1
        if value < 0:
            raise ValueError('Offtime must be positive and integer')
        elif value == self._settings['offtime']['current'] and self.caching:
            # Do nothing if caching is enabled and value not changed
            return
        try:
            self.__set_command(f'CONF:PWM:OFF {value}')
        except DeviceNotAvailable:
            self.logger.error('Could not set offtime')
        else:
            self.logger.info(f'Set offtime to {value}ns')
            self._settings['offtime']['current'] = value

    @property
    def temperature(self):
        """Fetch and return the current temperature in °C from the device.

        :return: Temperature in °C.
        :rtype: float

        .. note:: The HPG device has several temperature sensors. This one is
            located next to the circulator and load.
        """
        # Throttle device reads to at most once per second.
        if not getattr(self, '_last_update_temperature', False):
            self._last_update_temperature = 0
        if time.time() - self._last_update_temperature > 1:
            try:
                self._temperature = float(self.__get_command('SENSe:TEMPerature?'))
            except DeviceNotAvailable:
                return None
            except ValueError:
                self.logger.error('Could not read temperature')
                return None
            else:
                self._last_update_temperature = time.time()
        return self._temperature

    @property
    def power_forward(self):
        """Fetch and return the measured forward power in Watt [W] from the device

        :return: Forward power in Watt [W]
        :rtype: float

        .. note:: These values are not very accurate. You may use them for
            getting a ballpark figure but you'll need to add an external
            measurement for more precise values.
        """
        # Throttle device reads to at most once per second.
        if not getattr(self, '_last_update_power_forward', False):
            self._last_update_power_forward = 0
        if time.time() - self._last_update_power_forward > 1:
            try:
                self._power_forward = float(self.__get_command('SENSe:POWer:FORward?'))
            except DeviceNotAvailable:
                return None
            except ValueError:
                self.logger.error('Could not read forward power')
                return None
            else:
                self._last_update_power_forward = time.time()
        return self._power_forward

    @property
    def power_reflected(self):
        """Fetch and return the measured reflected power in Watt [W] from the device

        :return: Forward power in Watt [W]
        :rtype: float

        .. note:: These values are not very accurate. You may use them for
            getting a ballpark figure but you'll need to add an external
            measurement for more precise values.
        """
        # Throttle device reads to at most once per second.
        if not getattr(self, '_last_update_power_reflected', False):
            self._last_update_power_reflected = 0
        if time.time() - self._last_update_power_reflected > 1:
            try:
                self._power_reflected = float(self.__get_command('SENSe:POWer:REFLect?'))
            except DeviceNotAvailable:
                return None
            except ValueError:
                self.logger.error('Could not read reflected power')
                return None
            else:
                self._last_update_power_reflected = time.time()
        return self._power_reflected

    def getIDN(self):
        """Fetch ID, serial and version number from the device and save it in
        the settings.
        """
        result = self.__get_command('*IDN?').split(',')
        try:
            # result[1][1:] drops the leading character of the ID field —
            # presumably a space after the comma; confirm against the device.
            self.id = result[1][1:]
            self.serial = result[2]
            self.version = result[3]
            self.logger.debug(f'Device ID {self.id}, Serial {self.serial}, Version {self.version}')
        except IndexError:
            self.logger.error('Could not read IDN')

    @property
    def available(self):
        """Check if the device is available
        """
        now = time.time()
        # Re-probe availability at most once per second via *IDN?.
        if now - self.__is_available_last > 1.0:
            try:
                self.getIDN()
            except DeviceNotAvailable:
                self.available = False
            else:
                self.available = True
                self.__is_available_last = time.time()
        # Refresh the settings cache from the device every 5 seconds.
        if self.__is_available:
            if now - self.__cache_age > 5.0:
                self.__load_from_device()
                self.logger.info('Reload microwave generator cache')
                self.__cache_age = time.time()
        return self.__is_available

    @available.setter
    def available(self, value):
        if value:
            self.__is_available = True
            self.__is_available_last = time.time()
        else:
            # Going unavailable also invalidates the probe and cache timers.
            self.__is_available = False
            self.__is_available_last = 0
            self.__cache_age = 0

    @property
    def is_available(self):
        # Alias for :attr:`available`.
        return self.available
class DocumentManager:
    """Tracks added/translated documents in a TinyDB file under the project's
    configuration directory."""

    def __init__(self, path):
        # Database lives at <path>/<CONF_DIR>/<DB_FN>.
        self.db_file = os.path.join(path, CONF_DIR, DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        """(Re)open the database handle from the stored file path."""
        self._db = TinyDB(self.db_file)

    def close_db(self):
        """Close the underlying database handle."""
        self._db.close()

    def doc_exists(self, file_name, title):
        """Return True when a record matches both *file_name* and *title*."""
        entries = self._db.search((where('file_name') == file_name) & (where('name') == title))
        if entries:
            return True
        else:
            return False

    def is_doc_new(self, file_name):
        """Return True when no record exists yet for *file_name*."""
        file_name_exists = self._db.search(where('file_name') == file_name)
        if not file_name_exists:
            return True
        return False

    ''' receives a translation file and checks if there are corresponding source files'''
    def is_translation(self, file_name, title, matched_files, actions):
        ''' check if the file is a translation file'''
        # A file is a translation if, for some already-tracked source file,
        # inserting one of that source's download codes into its own title
        # (base.<code>.ext) reproduces *title*.
        for myFile in matched_files:
            relative_path = actions.norm_path(myFile)
            myFileTitle = os.path.basename(relative_path)
            ''' only compare the file being checked against source files that have already been added '''
            entry = self._db.get(where("file_name") == relative_path)
            if entry:
                ''' check the source file's download codes to see if the file being checked is a translation file '''
                downloads = self.get_doc_downloads(relative_path)
                if downloads:
                    for d in downloads:
                        ''' append the download code to the source file for comparison '''
                        # NOTE(review): split(".") assumes exactly one dot in
                        # the name — names like "a.b.txt" would break this.
                        temp = myFileTitle.split(".")
                        newString = temp[0]+"."+ d +"."+temp[1]
                        if newString == title:
                            return True
        return False

    ''' receives a source file and finds the source files associated with it '''
    #def delete_local_translations(self, file_name, path, actions):

    def is_doc_modified(self, file_name, path):
        """Return True when the on-disk file is newer than both tracked timestamps."""
        entry = self._db.get(where('file_name') == file_name)
        full_path = os.path.join(path, file_name)
        last_modified = os.stat(full_path).st_mtime
        if entry and entry['added'] < last_modified and entry['last_mod'] < last_modified:
            return True
        return False

    def add_document(self, title, create_date, doc_id, sys_mtime, last_mod, file_name):
        """Insert a fresh tracking record with an empty 'downloaded' list."""
        entry = {'name': title, 'added': create_date, 'id': doc_id, 'sys_last_mod': sys_mtime,
                 'last_mod': last_mod, 'file_name': file_name, 'downloaded': []}
        self._db.insert(entry)

    def update_document(self, field, new_val, doc_id):
        """Set *field* on the record with matching id.

        Lists go through the project's _update_entry_list operation; sets are
        stored as lists; everything else is written as-is.
        """
        if type(new_val) is list:
            self._db.update(_update_entry_list(field, new_val), where('id') == doc_id)
        else:
            if type(new_val) is set:
                new_val = list(new_val)
            self._db.update({field: new_val}, where('id') == doc_id)

    def get_doc_by_prop(self, prop, expected_value):
        """ get documents by the specified property """
        entry = self._db.get(where(prop) == expected_value)
        return entry

    def get_all_entries(self):
        """Return every record in the database."""
        return self._db.all()

    def get_doc_ids(self):
        """ returns all the ids of documents that the user has added """
        doc_ids = []
        for entry in self._db.all():
            doc_ids.append(entry['id'])
        return doc_ids

    def get_file_names(self):
        """ returns all the file names of documents that the user has added """
        file_names = []
        for entry in self._db.all():
            file_names.append(entry['file_name'])
        return file_names

    def get_names(self):
        """ returns all the names of documents that the user has added """
        file_names = []
        for entry in self._db.all():
            file_names.append(entry['name'])
        return file_names

    def get_doc_name(self, file_name):
        """ returns the file name of a document for a given file path """
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            return entry['name']
        else:
            return None

    def get_doc_locales(self, file_name):
        """ returns the target locales of a document for a given file """
        locales = []
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            locales.append(entry['locales'])
        return locales

    def get_doc_downloads(self, file_name):
        """ returns all the downloaded translations for a given file

        NOTE(review): implicitly returns None (not []) when no record exists —
        callers must truth-test the result.
        """
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            downloads = entry['downloaded']
            return downloads

    def remove_element(self, doc_id):
        """Delete the record whose id matches *doc_id*."""
        self._db.remove(where('id') == doc_id)

    def clear_prop(self, doc_id, prop):
        """ Clear specified property of a document according to its type """
        entry = self._db.get(where('id') == doc_id)
        if isinstance(entry[prop], str):
            self.update_document(prop, "", doc_id)
        elif isinstance(entry[prop], int):
            self.update_document(prop, 0, doc_id)
        elif isinstance(entry[prop], list):
            self.update_document(prop, [], doc_id)
        elif isinstance(entry[prop], dict):
            self.update_document(prop, {}, doc_id)

    def remove_element_in_prop(self, doc_id, prop, element):
        """Remove *element* from the list stored under *prop*, if present."""
        doc_prop = self.get_doc_by_prop('id', doc_id)[prop]
        if element in doc_prop:
            doc_prop.remove(element)
        self.update_document(prop, doc_prop, doc_id)

    def add_element_to_prop(self, doc_id, prop, element):
        """Append *element* to the list stored under *prop*, if absent."""
        doc_prop = self.get_doc_by_prop('id', doc_id)[prop]
        if element not in doc_prop:
            doc_prop.append(element)
        self.update_document(prop, doc_prop, doc_id)

    def clear_all(self):
        """Drop every record.

        NOTE(review): purge() is the TinyDB 3.x API — renamed truncate() in
        TinyDB 4; confirm the pinned dependency version.
        """
        self._db.purge()
class DocumentManager:
    """Tracks documents the user has added, backed by a TinyDB file."""

    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        # Re-open the backing database file.
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def doc_exists(self, file_name, title):
        """True when a record with both this file name and title exists."""
        match = (where('file_name') == file_name) & (where('name') == title)
        return bool(self._db.search(match))

    def is_doc_new(self, file_name):
        """True when no stored record uses this file name."""
        return not self._db.search(where('file_name') == file_name)

    def is_doc_modified(self, file_name, path):
        """True when the on-disk file is newer than both recorded timestamps."""
        record = self._db.get(where('file_name') == file_name)
        mtime = os.stat(os.path.join(path, file_name)).st_mtime
        return bool(record and record['added'] < mtime and record['last_mod'] < mtime)

    def add_document(self, title, create_date, doc_id, sys_mtime, last_mod, file_name):
        """Insert a fresh record for a newly added document."""
        self._db.insert({
            'name': title,
            'added': create_date,
            'id': doc_id,
            'sys_last_mod': sys_mtime,
            'last_mod': last_mod,
            'file_name': file_name,
            'downloaded': [],
        })

    def update_document(self, field, new_val, doc_id):
        """Update one field of the record whose id is doc_id."""
        target = where('id') == doc_id
        # Lists use the custom list-update operation; sets are converted to
        # lists so they can be JSON-serialized.
        if type(new_val) is list:
            self._db.update(_update_entry_list(field, new_val), target)
            return
        if type(new_val) is set:
            new_val = list(new_val)
        self._db.update({field: new_val}, target)

    def get_doc_by_prop(self, prop, expected_value):
        """ get documents by the specified property """
        return self._db.get(where(prop) == expected_value)

    def get_all_entries(self):
        return self._db.all()

    def get_doc_ids(self):
        """ returns all the ids of documents that user has added """
        return [record['id'] for record in self._db.all()]

    def remove_element(self, doc_id):
        self._db.remove(where('id') == doc_id)

    def clear_all(self):
        self._db.purge()
def remove_pending_tip(id_tip):
    """Delete a pending (unregistered-user) tip record by its id.

    Args:
        id_tip: value stored in the tip record's ``id`` field.
    """
    db = TinyDB(DATA_PATH + bot_config['unregistered_tip_user'])
    try:
        db.remove(Query().id == id_tip)
    finally:
        # Always release the storage handle, even if the removal raises —
        # the original leaked the handle on error.
        db.close()
class Plugin(object):
    """Noku 'academia' plugin: question answering and weather lookup commands."""

    def __init__(self, pm):
        self.pm = pm
        self.modulename = 'academia'
        self.configPath = 'pluginsconfig/data_config-{0}_a.json'.format(self.modulename)
        #self.waclient = tungsten.Tungsten("V48XKW-W2RE7VWR99") #V48XKW-W2RE7VWR99
        # Per-channel permission store: records which channels may use the plugin.
        self.configDB = TinyDB(self.configPath)
        self.chatinstances = {}
        # NOTE(review): hard-coded OWM API key — consider moving to config.
        self.owm = pyowm.OWM("d458741c71140c009b31f4cfd05560ba")

    @staticmethod
    def register_events():
        # Commands this plugin responds to, with required rank and help text.
        return [Events.Command("ask", Ranks.Default, "[query] Ask him a question or anything analytical"),
                Events.Command("w", Ranks.Default, "[query] Posts weather data."),
                Events.Command("academia.allow", Ranks.Admin),
                Events.Command("academia.block", Ranks.Admin)]

    async def handle_command(self, message_object, command, args):
        try:
            print("--{2}--\n[Noku-chat] {0} command from {1} by {3}".format(command, message_object.channel.name, arrow.now().format('MM-DD HH:mm:ss'), message_object.author.name))
        except:
            # Logging is best-effort; some names contain characters the
            # console encoding cannot print.
            print("[Noku]Cannot display data, probably emojis.")
        # Feature commands run only in channels that were explicitly allowed.
        if self.configDB.contains(Query().chanallow == message_object.channel.id):
            '''
            Add modules checks here
            '''
            if command == "ask":
                await self.ask(message_object, args[1])
            if command == "w":
                await self.weather(message_object, args[1])
        #Do not modify or add anything below it's for permissions
        if command == "{0}.allow".format(self.modulename):
            await self.allowChan(message_object)
        if command == "{0}.block".format(self.modulename):
            await self.blockChan(message_object)
        '''
        Add modules here
        '''

    async def weather(self, message_object, args):
        # Look up current weather for a location and post it as an embed.
        if args != "":
            await self.pm.client.send_typing(message_object.channel)
            obs = self.owm.weather_at_place(args)
            wjson = json.loads(obs.to_JSON())
            wobj = obs.get_weather()
            title = ":flag_{0}: **{1}, {2}**".format(wjson["Location"]["country"].lower(), wjson["Location"]["name"], wjson["Location"]["country"])
            #em = discord.Embed(title=title, description=wjson["Weather"]["status"], colour=0x007AFF)
            em = discord.Embed(title=title, description="{6} | :thermometer: **{0}°C** (_{1}°F_) from **{2}** to **{3}°C**, wind at **{4}m/s**, humidity at **{5}%**".format(wobj.get_temperature("celsius")["temp"], wobj.get_temperature("fahrenheit")["temp"], wobj.get_temperature("celsius")["temp_min"], wobj.get_temperature("celsius")["temp_max"], wjson["Weather"]["wind"]["speed"], wjson["Weather"]["humidity"], wobj.get_status()), colour=0x007AFF)
            #em.add_field(name=":thermometer: Temperature", value=":black_small_square: Min: {0}°C[{3}°F]\n:black_small_square: Avg: {1}°C[{4}°F]\n:black_small_square: Max: {2}°C[{5}°F]\n".format(wobj.get_temperature("celsius")["temp_min"],wobj.get_temperature("celsius")["temp"],wobj.get_temperature("celsius")["temp_max"],wobj.get_temperature("fahrenheit")["temp_min"],wobj.get_temperature("fahrenheit")["temp"],wobj.get_temperature("fahrenheit")["temp_max"]))
            #em.add_field(name=":droplet: Humidity", value=":black_small_square: {0}%".format(wjson["Weather"]["wind"]["deg"], wjson["Weather"]["wind"]["speed"]))
            #em.add_field(name=":wind_chime: Wind", value=":arrow_upper_right: {0}\n:dash: :{1}m/s".format())
            #em.set_footer(text="Noku-academia module version 2.0.1, weather version 0.4.0", icon_url=self.pm.client.user.avatar_url)
            await self.pm.client.send_message(message_object.channel, embed=em)
        else:
            await self.pm.client.send_message(message_object.channel, ':information_source:`Usage: [location] gets weather data for location.`'.format())

    #Do not modify or add anything below it's for permissions
    async def allowChan(self, message_object):
        # Grant this channel access to the plugin's feature commands.
        self.configDB.insert({'chanallow' : message_object.channel.id});
        await self.pm.client.send_message(message_object.channel, ':information_source:`Noku Bot-{1} has been allowed access to {0}`'.format(message_object.channel.name, self.modulename))

    async def blockChan(self, message_object):
        # Revoke this channel's access.
        self.configDB.remove(Query().chanallow == message_object.channel.id);
        await self.pm.client.send_message(message_object.channel, ':information_source:`Noku Bot-{1} has been blocked access to {0}`'.format(message_object.channel.name, self.modulename))
class ProgramEngine:  # pylint: disable=exec-used
    """Singleton registry of user programs.

    Programs are indexed in a TinyDB database and each one is also persisted
    as an individual JSON file under PROGRAM_PATH.
    """

    _instance = None

    def __init__(self):
        self._program = None
        self._log = ""
        self._programs = TinyDB("data/programs.json")
        query = Query()
        # Index any program file found on disk that is not yet in the DB.
        for dirname, _, filenames in os.walk(PROGRAM_PATH):
            for filename in filenames:
                if PROGRAM_PREFIX in filename:
                    program_name = filename[len(PROGRAM_PREFIX):-len(PROGRAM_SUFFIX)]
                    if not self._programs.search(query.name == program_name):
                        logging.info("adding program %s in path %s as default %r",
                                     program_name, dirname, ("default" in dirname))
                        self._programs.insert({"name": program_name,
                                               "filename": os.path.join(dirname, filename),
                                               "default": str("default" in dirname)})

    @classmethod
    def get_instance(cls):
        """Return the process-wide engine, creating it on first use."""
        if not cls._instance:
            cls._instance = ProgramEngine()
        return cls._instance

    def prog_list(self):
        """All indexed program records."""
        return self._programs.all()

    def save(self, program):
        """Upsert *program* in the index and rewrite its JSON file."""
        query = Query()
        self._program = program
        program_db_entry = program.as_dict()
        program_db_entry["filename"] = os.path.join(
            PROGRAM_PATH, PROGRAM_PREFIX + program.name + PROGRAM_SUFFIX)
        if self._programs.search(query.name == program.name):
            self._programs.update(program_db_entry, query.name == program.name)
        else:
            self._programs.insert(program_db_entry)
        # Context manager guarantees the file is closed even if the dump fails.
        with open(program_db_entry["filename"], 'w+') as f:
            json.dump(program.as_dict(), f)

    def load(self, name):
        """Load the program called *name* and make it current.

        Returns the currently loaded program (unchanged if *name* is unknown).
        """
        query = Query()
        program_db_entries = self._programs.search(query.name == name)
        if program_db_entries:
            logging.info(program_db_entries[0])
            # Fixed: the original opened this file without ever closing it.
            with open(program_db_entries[0]["filename"], 'r') as f:
                self._program = Program.from_dict(json.load(f))
        return self._program

    def delete(self, name):
        """Remove the program from the index and delete its file on disk."""
        query = Query()
        program_db_entries = self._programs.search(query.name == name)
        if program_db_entries:
            os.remove(program_db_entries[0]["filename"])
            self._programs.remove(query.name == name)

    def create(self, name, code):
        """Create a new in-memory program and make it current."""
        self._program = Program(name, code)
        return self._program

    def is_running(self, name):
        return self._program.is_running() and self._program.name == name

    def check_end(self):
        return self._program.check_end()

    def log(self, text):
        # Accumulate one line of program output.
        self._log += text + "\n"

    def get_log(self):
        return self._log
def removeByName(self, name):
    """Delete every record whose ``name`` field equals *name*."""
    database = TinyDB(self.db_filename)
    database.remove(Query().name == name)
    database.close()
def clean_old_files(changelog_table: TinyDB, now: datetime):
    """Delete files older than one week and drop their changelog records.

    Args:
        changelog_table: table whose entries carry 'datetime' and 'path' fields.
        now: reference time the one-week retention window is measured from.
    """
    keep_from = now - timedelta(weeks=1)
    for entry in changelog_table.search(where('datetime') <= keep_from):
        try:
            os.remove(entry['path'])
        except FileNotFoundError:
            # Already gone on disk — still purge the stale record below
            # (the original aborted the whole cleanup here).
            pass
        changelog_table.remove(where('path') == entry['path'])
# Interactive loop: add ('i') or remove ('r') books, 'c' to quit.
while True:
    keuze = input("kies i of r (c om te sluiten): ")
    if keuze == 'c':
        break
    if keuze == 'i':
        # Prompt for the new book's fields in order, then store the record.
        record = {
            'naamBoek': input('naam boek?: '),
            'auteur': input('naam auteur?: '),
            'omschrijving': input('omschrijving boek?: '),
        }
        db.insert(record)
    elif keuze == 'r':
        titel = input('naam boek?: ')
        treffers = db.search(Boek.naamBoek == titel)
        # Only remove when exactly one book matches the given title.
        if len(treffers) == 1:
            db.remove(Boek.naamBoek == titel)
        else:
            print('Boek bestaat niet!')
    else:
        print('pick i of r')
class JobDB:
    """Keeps a database of jobs, with a MD5 hash that encodes the function
    name, version, and all arguments to the function.
    """
    def __init__(self, path):
        self.db = TinyDB(path)
        # Guards every DB access; the lock is not reentrant, so methods that
        # call other locking methods must do so outside their own `with`.
        self.lock = Lock()

    def get_result_or_attach(self, key, prov, running):
        job = Query()
        with self.lock:
            rec = self.db.get(job.prov == prov)
            # NOTE(review): assumes a record for `prov` exists; `rec` is None
            # otherwise and the membership test below raises — confirm callers
            # check job_exists() first.
            if 'result' in rec:
                return 'retrieved', rec['key'], rec['result']
            # Unfinished job: attach to it if it (or its workflow) is running.
            job_running = rec['key'] in running
            wf_running = rec['link'] in running.workflows
            if job_running or wf_running:
                self.db.update(attach_job(key), job.prov == prov)
                return 'attached', rec['key'], None
            print(
                "WARNING: unfinished job in database. Removing it and "
                " rerunning.", file=sys.stderr)
            # NOTE(review): `eids`/`.eid` are the legacy TinyDB (<4) spellings
            # of doc_ids/doc_id — confirm the pinned tinydb version.
            self.db.remove(eids=[rec.eid])
            return 'broken', None, None

    def job_exists(self, prov):
        job = Query()
        with self.lock:
            return self.db.contains(job.prov == prov)

    def store_result(self, key, result):
        job = Query()
        with self.lock:
            if not self.db.contains(job.key == key):
                # Unknown job — nothing to store.
                return
        # Outside the critical section: add_time_stamp takes the lock itself.
        self.add_time_stamp(key, 'done')
        with self.lock:
            self.db.update({'result': result, 'link': None}, job.key == key)
            rec = self.db.get(job.key == key)
            # Callers get the list of jobs attached to this result.
            return rec['attached']

    def new_job(self, key, prov, job_msg):
        with self.lock:
            self.db.insert({
                'key': key,
                'attached': [],
                'prov': prov,
                'link': None,
                'time': {
                    'schedule': time_stamp()
                },
                'version': job_msg['data']['hints'].get('version'),
                'function': job_msg['data']['function'],
                'arguments': job_msg['data']['arguments']
            })
        return key, prov

    def add_link(self, key, ppn):
        job = Query()
        with self.lock:
            self.db.update({'link': ppn}, job.key == key)

    def get_linked_jobs(self, ppn):
        job = Query()
        with self.lock:
            rec = self.db.search(job.link == ppn)
            return [r['key'] for r in rec]

    def add_time_stamp(self, key, name):
        # Record the current time under rec['time'][name] for this job.
        def update(r):
            r['time'][name] = time_stamp()
        job = Query()
        with self.lock:
            self.db.update(update, job.key == key)
def test_remove_all_fails(db: TinyDB):
    """remove() with neither a condition nor doc_ids must refuse to wipe the table."""
    with pytest.raises(RuntimeError):
        db.remove()
class Proxy(object):
    """Facade over a local TinyDB and an optional remote store.

    Reads are served from the remote database when one is configured,
    otherwise from the local (file-backed or in-memory) TinyDB. A 'tag'
    query is AND-ed into every lookup to scope results.
    """

    def __init__(self, config):
        self.c = config
        self.ldb = None
        self.rdb = None
        self.tag = Query().noop()
        self.req = None
        # Local DB: file-backed when configured and openable, else in-memory.
        if config.local:
            try:
                self.ldb = TinyDB(config.local, storage=CachingMiddleware(JSONStorage))
            except Exception:
                self.ldb = TinyDB(storage=MemoryStorage)
        else:
            self.ldb = TinyDB(storage=MemoryStorage)
        # Remote DB: picked by URL scheme; failures leave rdb as None.
        if config.url:
            auth = None
            if config.user:
                auth = (config.user, click.prompt("password", hide_input=True))
            if config.url.startswith("http"):
                dbclass = CouchDB
            elif config.url.startswith("mongodb"):
                dbclass = MongoDB
            # NOTE(review): a URL with any other scheme leaves `dbclass`
            # unbound and raises NameError here — confirm inputs are validated
            # upstream.
            try:
                self.rdb = dbclass(config.url, auth=auth, verify=config.verify)
            except Exception:
                self.rdb = None

    def set_tag(self, tag=None):
        # Scope subsequent queries to this tag; None resets to match-all.
        self.tag = (where("tag") == tag) if (tag is not None) else Query().noop()

    def insert_multiple(self, docs):
        # Inserts always go to the local DB.
        self.ldb.insert_multiple(docs)

    def contains(self, q=None, **kargs):
        if q is None:
            q = self.tag
        for k in kargs:
            q &= where(k) == kargs[k]
        # Remote backends take the query's hash rather than the query object.
        if self.rdb:
            return self.rdb.contains(q._hash, **kargs)
        return self.ldb.contains(q)

    def search(self, q=None, **kargs):
        # Unlike contains/get, an explicit query is AND-ed with the tag here.
        if q is None:
            q = self.tag
        else:
            q = self.tag & q
        for k in kargs:
            q &= where(k) == kargs[k]
        if self.rdb:
            return list(self.rdb.search(q._hash, **kargs))
        return self.ldb.search(q)

    def get(self, q=None, **kargs):
        if q is None:
            q = self.tag
        for k in kargs:
            q &= where(k) == kargs[k]
        if self.rdb:
            return self.rdb.get(q._hash, **kargs)
        return self.ldb.get(q)

    def cleanup_local(self):
        # Deduplicate local records sharing the same (id, val) pair, keeping
        # the first occurrence of each.
        D = {}
        for e in self.ldb.search(self.tag):
            k = "%s := %s" % (e["id"], e["val"])
            if not k in D:
                D[k] = [e.doc_id]
            else:
                D[k].append(e.doc_id)
        for v in D.values():
            if len(v) > 1:
                self.ldb.remove(doc_ids=v[1:])

    def cleanup(self):
        # Delegate remote cleanup; assumes a remote DB is configured.
        self.rdb.cleanup(self)

    def find_matching_types(self, Locs, req=None, psize=0):
        # Remote lookup mutates Locs in place; Locs is returned either way.
        if self.rdb is not None:
            self.rdb.find_matching_types(Locs, req, psize)
        return Locs

    def close(self):
        self.ldb.close()
# (tail of a helper defined above this view)
    return bytes_to_string(shorten_bytes)

# scan share directory.
# Map md5-hex -> file name for every regular file in the share directory,
# excluding the database file itself.
flist = filter(lambda file: os.path.isfile(os.path.join(config['share_dir'], file)), os.listdir(config['share_dir']))
files = {byte_to_hex(md5((os.path.join(config['share_dir'], fname)))): fname
         for fname in flist if fname != os.path.basename(config['database'])}

# remove records of which the file does not exist in share directory
for file in db.all():
    if file['md5'] in files:
        # File still present: refresh its recorded name (it may have been renamed).
        db.update({'name': files[file['md5']]}, File.md5 == file['md5'])
    else:
        db.remove(File.md5 == file['md5'])

# move files in share directory to upload directory if not recorded
for file in files:
    if not db.contains(File.md5 == file):
        os.rename(os.path.join(config['share_dir'], files[file]),
                  os.path.join(config['upload_dir'], files[file]))

# record files in upload directory and move to share direcory
def upload(fname=None):
    # (definition continues beyond this view)
    if fname:
        if os.path.isfile(os.path.join(config['upload_dir'], fname)):
            file = {}
            fmd5 = md5(os.path.join(config['upload_dir'], fname))
            file['md5'] = byte_to_hex(fmd5)
class Plugin(object):
    """Noku 'utils' plugin: misc commands (hugs/pats, polls, a turn queue,
    currency exchange, urban dictionary, channel info, meme posting, etc.)."""

    def __init__(self, pm):
        self.pm = pm
        # Kaomoji used by the ~hug command.
        self.hugs = [
            '(づ ̄ ³ ̄)づ', '(つ≧▽≦)つ', '(つ✧ω✧)つ', '(づ ◕‿◕ )づ',
            '(⊃。•́‿•̀。)⊃', '(つ . •́ _ʖ •̀ .)つ', '(っಠ‿ಠ)っ', '(づ◡﹏◡)づ'
        ]
        self.configPath = 'pluginsconfig/data_config-utils_a.json'
        self.configDB = TinyDB(self.configPath)
        self.macroPath = 'pluginsconfig/data_macro_a.json'
        self.macroDB = TinyDB(self.macroPath)
        self.chaninfoPath = 'pluginsconfig/data_channel-info_a.json'
        self.chaninfDB = TinyDB(self.chaninfoPath)
        #self.converter = CurrencyConverter()
        print("[Noku-utils]Initalizing Imgur Stuff...")
        # NOTE(review): hard-coded Imgur credentials — consider moving to config.
        self.client = ImgurClient("43bdb8ab21d18b9", "fcba34a83a4650474ac57f6e3f8b0750dd26ecf5")
        print("[Noku-utils]Retrieving Images...")
        self.wmimages = self.client.subreddit_gallery("wholesomememes")
        self.catimages = self.client.subreddit_gallery("catsstandingup")
        self.dogimages = self.client.subreddit_gallery("rarepuppers")
        # Lyric lines cycled by the ~old command.
        self.kanye = [
            "I miss the old Kanye, straight from the Go Kanye\nChop up the soul Kanye, set on his goals Kanye",
            "I hate the new Kanye, the bad mood Kanye\nThe always rude Kanye, spaz in the news Kanye",
            "I miss the sweet Kanye, chop up the beats Kanye",
            "I gotta say, at that time I'd like to meet Kanye",
            "See, I invented Kanye, it wasn't any Kanyes\nAnd now I look and look around and there's so many Kanyes",
            "I used to love Kanye, I used to love Kanye\nI even had the pink polo, I thought I was Kanye",
            "What if Kanye made a song about Kanye\nCalled 'I Miss The Old Kanye'? Man, that'd be so Kanye",
            "That's all it was Kanye, we still love Kanye\nAnd I love you like Kanye loves Kanye"
        ]
        self.kanyeOrder = 0
        # Kaomoji templates for ~pat; {0} is the patter, {1} the patted user.
        self.pats = [
            '{1}(ノ_<。)ヾ(´▽`){0}', '{1}。・゚・(ノД`)ヽ( ̄ω ̄ ){0}',
            '{1}ρ(-ω-、)ヾ( ̄ω ̄; ){0}', '{0}ヽ( ̄ω ̄(。。 )ゝ{1}',
            '{0}(*´I`)ノ゚(ノД`゚)゚。{1}', '{0}ヽ(~_~(・_・ )ゝ{1}',
            '{1}(ノ_;)ヾ(´∀`){0}', '{1}(;ω; )ヾ(´∀`* ){0}',
            '{0}(*´ー)ノ(ノд`){1}', '{0}(´-ω-`( _ _ ){1}',
            '{0}(っ´ω`)ノ(╥ω╥){1}', '{0}(o・_・)ノ”(ノ_<、){1}'
        ]
        # In-memory turn queue for the q* commands (not persisted).
        self.userqueue = []

    @staticmethod
    def register_events():
        # Commands this plugin responds to, with required rank and help text.
        return [
            Events.Command("ping", Ranks.Default, "Pings the bot, nothing special"),
            Events.Command("hug", Ranks.Default, "[@username] Sends a hug to a user."),
            Events.Command("system.purgeAllDM", Ranks.Admin, "(!Admin use only)~~Cause people are paranoid"),
            Events.Command("pat", Ranks.Default, "[@username] Sends a pat to a user."),
            Events.Command("info.set", Ranks.Admin),
            Events.Command("info.delete", Ranks.Admin),
            Events.Command("info", Ranks.Default, "Shows channel info"),
            Events.Command("meme", Ranks.Default, "posts a wholesome meme"),
            Events.Command("exch", Ranks.Default, "[ammount] [from] [to] converts currency"),
            Events.Command("ud", Ranks.Default, "[query] Urban Dictionary"),
            Events.Command("lang", Ranks.Default, "[query] Tries to determine the language"),
            Events.Command("cats", Ranks.Default, "Posts a cat"),
            Events.Command("emotext", Ranks.Default, "Emojifies a text"),
            Events.Command(
                "poll", Ranks.Default,
                "[question]/[item1]/[item2]/[item3]/[item..] posts a poll and its corresponding reactions."
            ),
            Events.Command("dogs", Ranks.Default, "Posts a dog"),
            Events.Command("old", Ranks.Default, "Kanye Kanye Kanye"),
            Events.Command("qjoin", Ranks.Default, "Join Queue"),
            Events.Command("qdone", Ranks.Default, "Finish Queue"),
            Events.Command("qview", Ranks.Default, "View Queue"),
            Events.Command("qkick", Ranks.Admin, "[Admin] Kick user from Queue"),
            Events.Command("qreset", Ranks.Default, "[Admin] Reset Queue"),
            Events.Command("qhelp", Ranks.Default, "View Queue"),
            Events.Command("pins", Ranks.Default, "[#channel] shows pins from a specified channel."),
            Events.Command("print_avatars_to_console", Ranks.Admin, "[secret stuff]"),
            Events.Command("utils.allow", Ranks.Admin),
            Events.Command("restart", Ranks.Admin),
            Events.Command("utils.block", Ranks.Admin)
        ]

    async def handle_command(self, message_object, command, args):
        try:
            print("--{2}--\n[Noku-utils] {0} command from {1} by {3}".format(
                command, message_object.channel.name,
                arrow.now().format('MM-DD HH:mm:ss'),
                message_object.author.name))
        except:
            # Logging is best-effort; some names cannot be printed.
            print("[Noku]Cannot display data, probably emojis.")
        #print(args)
        config = Query()
        # Commands run only in allowed channels or in DMs.
        if self.configDB.contains(config.chanallow == message_object.channel.id) or message_object.channel.is_private:
            if command == "ping":
                await self.ping(message_object, "Pong")
            if command == "system.purgeAllDM":
                await self.purge(message_object)
            elif command == "pins":
                await self.showpins(message_object)
            elif command == "poll":
                await self.makepoll(message_object, args[1])
            elif command == "hug":
                await self.hug(message_object)
            elif command == "pat":
                await self.pat(message_object)
            elif command == "emotext":
                await self.emotext(message_object, args[1])
            elif command == "exch":
                await self.currency(message_object, args[1])
            elif command == "meme":
                await self.postmeme(message_object, self.wmimages)
            elif command == "ud":
                await self.urban(message_object, args[1])
            elif command == "lang":
                await self.lang(message_object, args[1])
            elif command == "cats":
                await self.postmeme(message_object, self.catimages)
            elif command == "dogs":
                await self.postmeme(message_object, self.dogimages)
            elif command == "old":
                await self.old(message_object, args[1])
            elif command == "info.set":
                await self.chaninfo(message_object, args[1], "set")
            elif command == "info.delete":
                await self.chaninfo(message_object, args[1], "delete")
            elif command == "info":
                await self.chaninfo(message_object, args[1], "info")
            elif command == "restart":
                await self.shutdown(message_object)
            elif command == "print_avatars_to_console":
                await self.getuser(message_object)
            if command == "qjoin":
                await self.qjoin(message_object)
            if command == "qdone":
                await self.qdone(message_object)
            if command == "qview":
                await self.qview(message_object)
            if command == "qkick":
                await self.qkick(message_object)
            if command == "qreset":
                await self.qreset(message_object)
            if command == "qhelp":
                await self.qhelp(message_object)
            if command == "utils.allow":
                await self.allowChan(message_object)
            if command == "utils.block":
                await self.blockChan(message_object)

    async def qhelp(self, message_object):
        # Post the queue-command cheat sheet.
        em = discord.Embed(
            title="Queue Help",
            description=
            "**~qjoin** : _Join Queue_\n**~qdone** : _Finish your turn_\n**~qview** : _Display the users in Queue_\n**~qkick** <@user> : _Kicks user from Queue (Admin only)_\n**~qreset** : _Resets Queue (Admin only)_",
            colour=0x007AFF)
        await self.pm.client.send_message(message_object.channel, embed=em)

    async def qkick(self, message_object):
        # Remove the first mentioned user from the queue (admin command).
        try:
            self.userqueue.remove(message_object.mentions[0])
            em = discord.Embed(
                title="Queue",
                description="{0} has been kicked from the Queue!".format(
                    message_object.mentions[0].name),
                colour=0x007AFF)
        except:
            # No mention given, or the user was not queued.
            em = discord.Embed(
                title="Queue",
                description=
                "No user specified or the user is not in the queue!",
                colour=0x007AFF)
        await self.pm.client.send_message(message_object.channel, embed=em)

    async def qreset(self, message_object):
        self.userqueue = []
        em = discord.Embed(title="Queue",
                           description="The queue has been emptied!",
                           colour=0x007AFF)
        await self.pm.client.send_message(message_object.channel, embed=em)

    async def qview(self, message_object):
        # Post the numbered list of queued users.
        if len(self.userqueue) > 0:
            display = "There's currently {0} users in the queue.\n---\n".format(
                len(self.userqueue))
            count = 1
            for user in self.userqueue:
                display += "{0}. **{1}**\n".format(count, user.name)
                count += 1
            em = discord.Embed(title="Queue",
                               description=display,
                               colour=0x007AFF)
        else:
            em = discord.Embed(title="Queue",
                               description="The queue is empty!",
                               colour=0x007AFF)
        await self.pm.client.send_message(message_object.channel, embed=em)

    async def qdone(self, message_object):
        # Let the author leave the queue, then re-post the queue state.
        try:
            self.userqueue.remove(message_object.author)
            em = discord.Embed(title="Queue",
                               description="You Successfuly left the Queue!",
                               colour=0x007AFF)
        except:
            em = discord.Embed(title="Queue",
                               description="You're not in the Queue!",
                               colour=0x007AFF)
        await self.pm.client.send_message(message_object.channel, embed=em)
        await self.qview(message_object)

    async def qjoin(self, message_object):
        # Add the author to the queue unless already present, then show it.
        if not message_object.author in self.userqueue:
            self.userqueue.append(message_object.author)
            em = discord.Embed(
                title="Queue",
                description="{0} has been added to the queue!".format(
                    message_object.author.name),
                colour=0x007AFF)
        else:
            em = discord.Embed(
                title="Queue",
                description="{0} is already in the queue!".format(
                    message_object.author.name),
                colour=0x007AFF)
        await self.pm.client.send_message(message_object.channel, embed=em)
        await self.qview(message_object)

    async def currency(self, message_object, args):
        # ~exch [ammount] [from] [to] — convert via currency-api.appspot.com.
        try:
            ammount = int(args.split(" ")[0])
            fr = args.split(" ")[1]
            to = args.split(" ")[2]
            # NOTE(review): `re` shadows the regex module name locally.
            re = requests.get(
                "https://currency-api.appspot.com/api/{0}/{1}.json".format(
                    fr.lower(), to.lower()))
            if re.json()["success"] or re.json()["success"] == "true":
                converted = ammount * float(re.json()["rate"])
                description = ":currency_exchange: **{0:,.2f} {1}** Equals **{2:,.2f} {3}**".format(
                    ammount, fr.upper(), converted, to.upper())
                #em = discord.Embed(title=title, description=wjson["Weather"]["status"],)
                em = discord.Embed(title="Currency Exchange",
                                   description=description,
                                   colour=0x007AFF)
                em.set_footer(text="Current Rate: 1 {0} = {1} {2}".format(
                    fr.upper(), re.json()["rate"], to.upper()))
            else:
                description = ":exclamation: _Invalid currency specified!_"
                #em = discord.Embed(title=title, description=wjson["Weather"]["status"], colour=0x007AFF)
                em = discord.Embed(title="Currency Exchange",
                                   description=description,
                                   colour=0x007AFF)
            await self.pm.client.send_message(message_object.channel, embed=em)
        except:
            # Any parse/network failure falls through to the usage hint.
            description = ":information_source: Usage: [ammount] [from] [to]\nEx. `~exch 100 jpy usd`"
            em = discord.Embed(title="Currency Exchange",
                               description=description,
                               colour=0x007AFF)
            await self.pm.client.send_message(message_object.channel, embed=em)

    async def emotext(self, message_object, args):
        # Convert letters/digits to emoji shortcodes and echo the result.
        string = ""
        number = [
            "zero", "one", "two", "three", "four", "five", "six", "seven",
            "eight", "nine"
        ]
        for x in args.lower():
            try:
                if x in "qwertyuiopasdfghjklzxcvbnm":
                    string += ":regional_indicator_{0}: ".format(x)
                if x == " ":
                    string += "\n "
                if x in "1234567890":
                    string += ":{0}: ".format(number[int(x)])
            except:
                pass
        await self.pm.client.send_message(message_object.channel, string)
        pass

    async def lang(self, message_object, args):
        # Detect the language of the given text.
        iso = langdetect.detect(args)
        x = "```{0}```Language result: {1}[{2}]".format(
            args, iso639.to_name(iso), iso639.to_native(iso))
        await self.pm.client.send_message(message_object.channel, x)

    async def urban(self, message_object, args):
        # Post the top Urban Dictionary definition for the query.
        catalog = urbandict.define(args)
        em = discord.Embed(title='Urban Dictionary',
                           description="Query: " + args,
                           colour=0x007AFF)
        em.set_author(name='{0}\'s Result'.format(message_object.author.name))
        em.set_footer(text="Noku-utils version 0.3",
                      icon_url=self.pm.client.user.avatar_url)
        em.add_field(name="Definiton", value=catalog[0]['def'])
        em.add_field(name="Example", value=catalog[0]['example'])
        await self.pm.client.send_message(message_object.channel, embed=em)

    async def purge(self, message_object):
        # Delete every message this bot ever sent in its private channels.
        print("Purge: A")
        for channels in self.pm.client.private_channels:
            if channels.is_private:
                print("Purge: B")
                #try:
                print("Purge: B.5")
                async for message in self.pm.client.logs_from(channels):
                    print("Purge: C")
                    if message.author == self.pm.client.user:
                        try:
                            print("Purge: D")
                            print("Delete:{0}".format(message.content))
                            await self.pm.client.delete_message(message)
                        except:
                            print("Purge: D.5")
                            pass
                #except:
                #    pass
        pass

    async def makepoll(self, message_object, args):
        # ~poll question/item1/item2/... — post the poll and seed reactions.
        reactions = ["🇦", "🇧", "🇨", "🇩", "🇪", "🇫", "🇬", "🇭", "🇮", "🇯"]
        letter = "ABCDEFGHIJ"
        items = args.split("/")
        content = ":pencil:|**{0}**\n".format(items[0])
        count = 0
        for x in items[1:]:
            content += ":black_small_square: **{0}.** `{1}`\n".format(
                letter[count], x)
            count += 1
        message = await self.pm.client.send_message(message_object.channel,
                                                    content)
        for x in range(0, count):
            await self.pm.client.add_reaction(message, reactions[x])
        await self.pm.client.delete_message(message_object)
        pass

    async def ping(self, message_object, reply):
        # Reply with the message's age in milliseconds as a rough latency.
        speed = datetime.datetime.now() - message_object.timestamp
        await self.pm.client.send_message(
            message_object.channel,
            reply + " " + str(round(speed.microseconds / 1000)) + "ms")

    async def old(self, message_object, args):
        # Substitute "Kanye" with the given text in the lyric lines; with
        # ".order" the lines are cycled in order instead of chosen at random.
        if ".order" in args:
            args = args.replace(".order", args)
            await self.pm.client.send_message(
                message_object.channel,
                self.kanye[self.kanyeOrder].replace("Kanye", args))
            self.kanyeOrder = self.kanyeOrder + 1
            if self.kanyeOrder > len(self.kanye) - 1:
                self.kanyeOrder = 0
        else:
            await self.pm.client.send_message(
                message_object.channel,
                random.choice(self.kanye).replace("Kanye", args))

    async def getuser(self, message_object):
        # Dump every visible member's avatar URL to the console.
        for x in self.pm.client.get_all_members():
            print(x.avatar_url)

    async def hug(self, message_object):
        # Send a random hug kaomoji from the author to the mentioned user.
        if len(message_object.mentions) != 0:
            await self.pm.client.send_message(
                message_object.channel,
                "{0} {1} {2}".format(message_object.author.mention,
                                     random.choice(self.hugs),
                                     message_object.mentions[0].mention))
        else:
            await self.pm.client.send_message(
                message_object.channel,
                ":exclamation:`Welp, that\'s not a valid user!`")

    async def pat(self, message_object):
        # Send a random pat kaomoji from the author to the mentioned user.
        if len(message_object.mentions) != 0:
            await self.pm.client.send_message(
                message_object.channel,
                random.choice(self.pats).format(
                    message_object.author.mention,
                    message_object.mentions[0].mention))
        else:
            await self.pm.client.send_message(
                message_object.channel,
                ":exclamation:`Welp, that\'s not a valid user!`")

    async def macroadd(self, message_object, args):
        # First word is the trigger; the remainder is the stored text.
        trigger = args.split(" ")[0]
        self.macroDB.insert({'trigger': trigger, 'data': args[len(trigger):]})
        await self.pm.client.send_message(
            message_object.channel,
            ":information_source:`{0} has been added as a macro!`".format(
                trigger))

    async def macrodel(self, message_object, args):
        self.macroDB.remove(Query().trigger == args)
        await self.pm.client.send_message(
            message_object.channel,
            ":information_source:`{0} has been deleted! Probably..`".format(
                args))

    async def helpUtil(self, message_object):
        await self.pm.client.send_message(
            message_object.channel,
            ":information_source: Help detail for Utilities")
        await self.pm.client.send_message(
            message_object.channel,
            "```~hug @user\n~ping\n~macro.add [trigger] [data]\n~macro.delete [trigger]\n~macro [trigger](alt. ~m)\n~macro.assigned```"
        )

    async def macroShow(self, message_object, args):
        # Post the stored text for a trigger, or complain if unknown.
        try:
            await self.pm.client.send_message(
                message_object.channel,
                self.macroDB.search(Query().trigger == args)[0]["data"])
        except:
            await self.pm.client.send_message(
                message_object.channel,
                ":exclamation:`Welp, that\'s not a valid macro!`")

    async def macroShowAssigned(self, message_object):
        # List all defined macro triggers in one code block.
        macros = self.macroDB.search(Query().trigger != "")
        x = "```"
        for m in macros:
            x = x + m['trigger'] + " "
        x = x + "```"
        await self.pm.client.send_message(message_object.channel, x)

    async def chaninfo(self, message_object, args, trigger):
        # Per-channel info blurb: "set" replaces it, "delete" removes it,
        # anything else displays it.
        #trigger = args.split(" ")[0]
        if trigger == "set":
            self.chaninfDB.remove(Query().channel == message_object.channel.id)
            self.chaninfDB.insert({
                'channel': message_object.channel.id,
                'data': args
            })
            await self.pm.client.send_message(
                message_object.channel,
                ":information_source:`{0} info has been updated!`".format(
                    trigger))
        elif trigger == "delete":
            self.chaninfDB.remove(Query().channel == message_object.channel.id)
            await self.pm.client.send_message(
                message_object.channel,
                ":information_source:`{0} info has been removed!`".format(
                    trigger))
        else:
            try:
                await self.pm.client.send_message(
                    message_object.channel,
                    self.chaninfDB.search(Query().channel == message_object.
                                          channel.id)[0]["data"])
            except:
                await self.pm.client.send_message(
                    message_object.channel,
                    ":exclamation:No info! `~info set [message]` to set a channel info"
                )

    async def postmeme(self, message_object, imagelist):
        # Post a random image link from the pre-fetched gallery list.
        await self.pm.client.send_message(
            message_object.channel,
            "{0}".format(random.choice(imagelist).link))

    async def showpins(self, message_object):
        # Re-post every pinned message of the mentioned channel as embeds.
        try:
            if len(message_object.channel_mentions) > 0:
                for x in await self.pm.client.pins_from(
                        message_object.channel_mentions[0]):
                    em = discord.Embed(title="\n",
                                       description=x.content,
                                       colour=0x007AFF)
                    em.set_author(name='{0} - {1}'.format(
                        x.author.name,
                        arrow.get(x.timestamp).format('MM-DD HH:mm')))
                    em.set_thumbnail(url=x.author.avatar_url)
                    await self.pm.client.send_message(message_object.channel,
                                                      embed=em)
                pass
            else:
                await self.pm.client.send_message(
                    message_object.channel,
                    ":exclamation:No channel specified! Usage: `~pins [#channel]`"
                )
        except:
            await self.pm.client.send_message(
                message_object.channel,
                ":exclamation:`Error retrieving pins!`")
        pass

    async def allowChan(self, message_object):
        # Grant this channel access to the plugin's commands.
        self.configDB.insert({'chanallow': message_object.channel.id})
        await self.pm.client.send_message(
            message_object.channel,
            ':information_source:`Noku Bot-utils has been allowed access to {0}`'
            .format(message_object.channel.name))

    async def blockChan(self, message_object):
        # Revoke this channel's access.
        self.configDB.remove(Query().chanallow == message_object.channel.id)
        await self.pm.client.send_message(
            message_object.channel,
            ':information_source:`Noku Bot-utils has been blocked access to {0}`'
            .format(message_object.channel.name))

    async def shutdown(self, message_object):
        # Announce, set a "Restarting..." presence, then hard-exit so the
        # supervisor restarts the process.
        game = discord.Game()
        game.name = "Restarting...".format()
        await self.pm.client.change_presence(game=game, afk=False)
        await self.pm.client.send_message(
            message_object.channel,
            ':information_source:`さようなら {0}! Noku bot is rebooting! <3`'.
            format(message_object.author.name))
        exit()
class WSDaemon():
    """Keyboard-listening text-expansion daemon.

    Watches global key presses, keeps a rolling character buffer, and when
    the buffer ends with ``trigger_prefix + trigger`` replaces the typed
    trigger with its stored expansion text.
    """

    def __init__(self, options):
        self.kb_listener = keyboard.Listener(on_press=self.listener_on_press)
        self.kb_controller = keyboard.Controller()
        self.buffer_arr = []
        self.buffer_limit = int(options['buffer_limit'])
        self.expansion_file = options['expansion_file']
        self.trigger_prefix = options['trigger_prefix']
        self.cheatsheet_file = options['cheatsheet_file']
        self.expansion_arr = []
        self.load_keywords()

    def init_db(self):
        ##Try to close DB, then load a new copy.
        try:
            self.db_handle.close()
        except Exception:
            # First call: no handle exists yet.
            pass
        self.db_handle = TinyDB(self.expansion_file, sort_keys=True, indent=1)

    def start(self):
        self.kb_listener.start()

    def listener_on_press(self, key_obj):
        self.update_buffer(key_obj)
        self.parse_buffer()

    def update_buffer(self, key_obj):
        # Printable keys extend the buffer; backspace shortens it.
        if isinstance(key_obj, keyboard.KeyCode):
            ##Drop oldest character from buffer if we're over limit.
            char = key_obj.char
            if len(self.buffer_arr) + 1 > self.buffer_limit + len(
                    self.trigger_prefix):
                self.buffer_arr.pop(0)
            ##Add character to buffer
            self.buffer_arr.append(char)
        ##Delete the last character if backspace is pressed and array is not empty.
        elif key_obj == keyboard.Key.backspace and len(self.buffer_arr):
            self.buffer_arr.pop()

    def parse_buffer(self):
        # Fire an expansion when the buffer ends with prefix + trigger.
        buffer_string = "".join(self.buffer_arr)
        for expansion in self.expansion_arr:
            if buffer_string.endswith(self.trigger_prefix +
                                      expansion["trigger"]):
                self.execute_expansion(expansion["trigger"],
                                       str(expansion["text"]))
        ##Ideally I think the buffer should be cleared, but the typing
        ##appears to occur asyncronously. So even if I clear the buffer
        ##after calling the "press" functions, the keypresses don't
        ##appear to register until returning to the main loop.
        ##However, in practice it doesn't seem to cause an issue.

    def load_keywords(self):
        # (Re)load all expansions from disk and refresh the cheat sheet.
        self.init_db()
        del self.expansion_arr[:]
        for row in iter(self.db_handle):
            self.expansion_arr.append(row)
        self.create_cheatsheet()

    def save_expansion(self, trigger, expansion):
        """Store a new trigger/expansion pair; False if the trigger exists."""
        if not self.validate_unique_trigger(trigger):
            return False
        self.db_handle.insert({"trigger": trigger, "text": expansion})
        self.load_keywords()
        return True

    def delete_expansion(self, trigger):
        """Remove a stored expansion by its trigger, if present."""
        trigger_query = Query()
        ##Only try to delete if it already exists...
        # BUG FIX: the original (a) called validate_unique_trigger without
        # `self.` (NameError), (b) inverted the existence test, and (c)
        # matched on the bare Query object instead of its `trigger` field.
        if not self.validate_unique_trigger(trigger):
            self.db_handle.remove(trigger_query.trigger == trigger)

    def validate_unique_trigger(self, trigger):
        trigger_query = Query()
        ##If there are no results we want to evaluate to true, false if
        ##anything is returned:
        return not self.db_handle.search(trigger_query.trigger == trigger)

    def execute_expansion(self, trigger, expansion):
        ##Erase the typed trigger, spit out the expansion.
        delete_length = len(self.trigger_prefix + trigger)
        for i in range(delete_length):
            self.kb_controller.press(keyboard.Key.backspace)
        for char in expansion:
            self.kb_controller.press(char)

    def create_cheatsheet(self):
        # Regenerate the human-readable cheat sheet from current expansions.
        cs_generator = CheatSheetGenerator(self.cheatsheet_file)
        for expansion in self.expansion_arr:
            trigger = expansion["trigger"]
            text = expansion["text"]
            cs_generator.add_entry(trigger, text)
        cs_generator.write_output()
class Remote():
    """Facade over the TinyDB remote database plus the live remote
    objects, both keyed by GPIO pin."""

    def __init__(self):
        self.db = TinyDB("backend/database.json")
        self.query = Query()
        print("Loaded Database")
        self.valid_types = ["SimpleOutput", "MotionSensor", "Switch",
                            "AlarmSystem"]
        self.remotes = {}
        # Re-create one live object per stored database row.
        for remote in self.to_dict():
            self._add_locally(remote)
        print("created remotes")
        self.time_from_debug = int(time())

    # runs in parallel with the flask server, for physically
    # displaying the lights
    def run(self):
        print("Running Remotes")
        while True:
            try:
                sleep(0.5)  # Prevents the system from going too fast
                to_debug = self._show_debug_output()
                self._run_the_remotes(to_debug)
            except RuntimeError as e:
                print(e)
                print("Continuing anyway")
                continue

    def _show_debug_output(self):
        """Print a status block at most every 5 seconds; return whether
        debug output is due this cycle."""
        current_time = int(time())
        debug = current_time - self.time_from_debug >= 5
        if debug:
            print("")
            print("Time is: ", current_time)
            print("size of db:", len(self.db))
            print("size of remotes:", len(self.remotes))
            self.time_from_debug = current_time
        return debug

    def _run_the_remotes(self, debug=True):
        """Feed every stored row to its live object, then run its output."""
        for remote in self.to_dict():  # use the database to find things
            pin = remote['pin']
            if pin in self.remotes:
                self.remotes[pin].input(remote)
                self.remotes[pin].output(self.db, self.query)
                if debug:
                    print("db", remote)

    def _check_for_duplicate_pin(self, dic=None, pin=None):
        """Raise ValueError when *pin* (or any pin listed in *dic*) is
        already claimed by a stored remote.

        BUG FIXES vs. the original:
        * the default was the mutable ``dic={}`` (shared across calls);
        * in the ``"pins"`` branch ``result`` was never bound, so the
          final check raised NameError instead of returning cleanly.
        """
        if dic is None:
            dic = {}
        if pin is None:
            if "pins" in dic:
                # dic["pins"] lists the dic keys that hold pin numbers;
                # the recursive call raises ValueError on a clash.
                for pin_key in dic["pins"]:
                    self._check_for_duplicate_pin(pin=dic[pin_key])
                return
            result = self.get_remote_data(dic["pin"])
        else:
            result = self.get_remote_data(pin)
        if result is not None:
            # GPIO duplicate pin error
            raise ValueError("GPIO Pin in use either"
                             + " change this pin or delete"
                             + " the remote currently using "
                             + " this pin")

    # pass a name into method, will return relevant class
    def get_relevant_type(self, remote_type):
        if remote_type in self.valid_types:
            return getattr(backend.remote_object, remote_type)
        return None

    # adds to remote dictionary only. Should only be used during init
    def _add_locally(self, remote):
        remote_class = self.get_relevant_type(remote["type"])
        if remote_class is not None:
            import copy
            # Deep-copy so the live object cannot mutate the db row.
            self.remotes[remote['pin']] = remote_class(copy.deepcopy(remote))

    # adds to the dictionary and database
    def add(self, remote):
        print(remote)
        self._check_for_duplicate_pin(dic=remote)
        self._add_locally(remote)
        self.db.insert(remote)
        print("# of remotes is:", len(self.remotes))

    # toggles a certain key
    def toggle(self, pin, key="keep_on"):
        pin = int(pin)
        result = self.get_remote_data(pin)
        self.update_remote(pin, {key: not result[key]})

    # deletes the remote from local memory
    def _delete_locally(self, pin):
        pin = int(pin)
        self.remotes[pin].close()  # Safely remove device
        self.remotes.pop(pin)

    # Deletes from db and local copy
    def delete(self, pin):
        pin = int(pin)
        self.db.remove(self.query["pin"] == pin)
        self._delete_locally(pin)

    # change pin locally
    def _change_pin_locally(self, pin, dic):
        pin = int(pin)  # the old pin
        self.remotes[int(dic["pin"])] = self.remotes.pop(pin)

    # Updates database
    def update_remote(self, pin, dic):
        pin = int(pin)
        if "pin" in dic and int(dic["pin"]) != pin:  # switching pins
            self._check_for_duplicate_pin(dic=dic)
            self._change_pin_locally(pin, dic)
        self.db.update(dic, self.query["pin"] == pin)

    # Returns value from db by pin, or None when absent
    def get_remote_data(self, pin):
        pin = int(pin)
        return self.db.get(self.query["pin"] == pin)

    def to_dict(self):
        """All stored rows as a list of dicts."""
        return self.db.all()