Example #1
class TinydbREST(object):

    class Request(object):
        def __init__(self, status_code):
            self.status_code = status_code

    def __init__(self, path, table, pk=None):
        self.table = table
        self.db = TinyDB(path, storage=_tdbserialization).table(table)
        self.pk = pk

    def _getpk(self):
        r = self.db.get(Query().name==self.pk)
        if r is None:
            # todo: return 404 not found error. on GET and _('pipe').post()
            raise IOError('{} Not found'.format(self.pk))
        elif 'data' in r.keys():
            return r['data']
        else:
            return r

    def get(self):
        if self.pk is not None:
            return self.Request(200), self._getpk()
        else:
            return self.Request(200), self.db.all()

    def post(self, request_body):
        if self.pk is None:
            name = request_body['name']
            if name in ['pipes','remotes']:
                raise APIError('{} is reserved, use another name'.format(name))
            if self.db.contains(Query().name == name):
                raise APIError('entry {} already exists, needs to be unique'.format(name))
            self.db.insert(request_body)
            self.pk = name
            return self.Request(201), self._getpk()
        else:
            name = self.pk
            self.db.upsert({'name': name, 'data': request_body}, Query().name == self.pk)
            return self.Request(200), self._getpk()

    def put(self, request_body):
        if self.pk is not None:
            self.db.upsert(request_body, Query().name == self.pk)
            return self.Request(200), self._getpk()

    def patch(self, request_body):
        if self.pk is not None:
            dict_ = self._getpk()
            dict_.update(request_body)
            return self.put(dict_)

    def delete(self):
        if self.pk is not None:
            self.db.remove(Query().name == self.pk)
            return self.Request(204), not self.db.contains(Query().name == self.pk)
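
A minimal usage sketch of the class above. The file name, table name and payload are made up, and it assumes the module-level _tdbserialization storage and APIError exception from the original project:

# hypothetical usage; path, table and payload are illustrative only
api = TinydbREST('pipes.json', 'jobs')
resp, body = api.post({'name': 'job1', 'cmd': 'echo hi'})   # insert is guarded by db.contains()
assert resp.status_code == 201
resp, body = TinydbREST('pipes.json', 'jobs', pk='job1').get()
assert resp.status_code == 200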
Example #2
class Database(object):
    def __init__(self, databaseName=None):

        if os.path.exists(databaseName):
            databasePath = databaseName
        else:
            databasePath = SUBPATH + '/' + databaseName + '.json'

            if not os.path.exists(SUBPATH):
                os.makedirs(SUBPATH)

        self.db = TinyDB(databasePath)
        self.Eyeball = Query()

    def getImage(self, identifier):
        return self.db.search(self.Eyeball.fileName == identifier)

    def addEyeball(self, eyeBallObj):

        condition = self.Eyeball.fileName == eyeBallObj.getFileName()

        if self.db.contains(condition):
            el = self.db.get(condition)
            self.db.update(eyeBallObj.getDict(), eids=[el.eid])
            print 'Edited existing item with eid: ' + str(el.eid)

        else:
            retVal = self.db.insert(eyeBallObj.getDict())
            print 'Added new item with eid: ' + str(retVal)

    def eyeBallExists(self, fileName):
        return self.db.contains(self.Eyeball.fileName == fileName)

    def getTruth(self, identifier):
        entry = self.getImage(identifier)

        # search() returns a list; guard against a missing record before indexing into it
        if not entry:
            return False, (-1, -1)

        x = entry[0]['truth']['x']
        y = entry[0]['truth']['y']

        # sometimes the value is stored as a unicode string, we need an int
        if isinstance(x, basestring):
            x = int(x)
            y = int(y)

        validEntry = not (x == -1 or y == -1)

        return validEntry, (x, y)

    def getSearchFileMatch(self, query):
        return self.db.search(self.Eyeball.fileName.matches(query))
Example #3
class TodoDB:
    def __init__(self) -> None:
        self._db = TinyDB("db.json", storage=CachingMiddleware(JSONStorage))

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        self._db.close()

    async def find_todo(self, todo_id: int) -> Optional[TodoItem]:
        if self._db.contains(doc_id=todo_id):
            db_rec = self._db.get(doc_id=todo_id)

            return TodoItem(
                todo_id=db_rec.doc_id,
                name=db_rec["name"],
                description=db_rec["description"],
                completed=db_rec["completed"],
            )
        return None

    async def add_todo(self, todo_item: TodoItemIn) -> Optional[TodoItem]:
        new_todo_in = todo_item.copy()
        new_id = self._db.insert(new_todo_in.dict())
        new_todo = TodoItem(
            todo_id=new_id,
            name=todo_item.name,
            description=todo_item.description,
            completed=todo_item.completed,
        )
        return new_todo

    async def update_todo(
        self, todo_id: int, todo_item: TodoItem
    ) -> Optional[TodoItem]:
        if self._db.contains(doc_id=todo_id):
            self._db.update(
                {
                    "name": todo_item.name,
                    "description": todo_item.description,
                    "completed": todo_item.completed,
                },
                doc_ids=[todo_id],
            )
            return TodoItem(
                todo_id=todo_id,
                name=todo_item.name,
                description=todo_item.description,
                completed=todo_item.completed,
            )
        return None

    async def remove_todo(self, todo_id: int) -> Optional[int]:
        if self._db.contains(doc_id=todo_id):
            self._db.remove(doc_ids=[todo_id])
            return todo_id
        return None
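
The TodoItem and TodoItemIn models are not shown above. A minimal Pydantic (v1-style) sketch that is consistent with how the class uses them, offered as an assumption rather than the project's actual definitions:

from pydantic import BaseModel

class TodoItemIn(BaseModel):
    name: str
    description: str
    completed: bool = False

class TodoItem(TodoItemIn):
    todo_id: int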
Example #4
def notification_get(server, channel=None):
    notificationlist = TinyDB("notifications/NOTIFICATIONS" + server.id +
                              ".json")
    if not notificationlist.contains(query["channel"] == channel.id):
        notificationlist.insert({"channel": channel.id, "commands": []})
    return notificationlist.search(
        query["channel"] == channel.id)[0]["commands"]
def CheckMessageForReactAssignment(msgID: int):
    db = TinyDB('data/databases/reactbasedroles.json')
    queryBuilder = Query()
    return db.contains(queryBuilder.msgID == msgID)
Example #6
class Proxy(object):
    def __init__(self, config):
        self.c = config
        self.ldb = None
        self.rdb = None
        self.tag = Query()
        self.req = None
        if config.local:
            try:
                self.ldb = TinyDB(config.local,
                                  storage=CachingMiddleware(JSONStorage))
            except Exception:
                self.ldb = TinyDB(storage=MemoryStorage)
        else:
            self.ldb = TinyDB(storage=MemoryStorage)
        if config.url:
            auth = None
            if config.user:
                auth = (config.user, click.prompt('password', hide_input=True))
            if config.url.startswith('http'):
                dbclass = CouchDB
            elif config.url.startswith('mongodb'):
                dbclass = MongoDB
            try:
                self.rdb = dbclass(config.url, auth=auth, verify=config.verify)
            except Exception:
                self.rdb = None

    def set_tag(self, tag=None):
        self.tag = (where('tag').search(tag)) if tag else Query()

    def insert_multiple(self, docs):
        self.ldb.insert_multiple(docs)

    def contains(self, q=None, **kargs):
        if q is None: q = self.tag
        for k in kargs:
            q &= (where(k) == kargs[k])
        if self.rdb:
            return self.rdb.contains(q.hashval, **kargs)
        return self.ldb.contains(q)

    def search(self, q=None, **kargs):
        if q is None: q = self.tag
        for k in kargs:
            q &= (where(k) == kargs[k])
        if self.rdb:
            return list(self.rdb.search(q.hashval, **kargs))
        return self.ldb.search(q)

    def get(self, q=None, **kargs):
        if q is None: q = self.tag
        for k in kargs:
            q &= (where(k) == kargs[k])
        if self.rdb:
            return self.rdb.get(q.hashval, **kargs)
        return self.ldb.get(q)

    def close(self):
        self.ldb.close()
Example #7
class DartsMatchDao:
    __instance = None
    __lock = threading.Lock()  # shared class-level lock; a fresh Lock() per call would not synchronize

    @staticmethod
    def get_instance():
        if DartsMatchDao.__instance is None:
            with DartsMatchDao.__lock:
                if DartsMatchDao.__instance is None:  # Double-checked locking
                    DartsMatchDao()
        return DartsMatchDao.__instance

    def __init__(self):
        if DartsMatchDao.__instance is not None:
            raise Exception("This is a singleton!")
        else:
            DartsMatchDao.__instance = self
        self.db = TinyDB('db.json')
        self.lock = threading.Lock()
        self.rand = random.random()

    def add(self, match):
        self.lock.acquire()

        time.sleep(4)

        Match = Query()
        if not self.db.contains(Match.player1 == match.player1):
            self.db.insert({'type': match.type, 'player1': match.player1, 'player2': match.player2})

        print('Insert attempted on ' + match.player1 + '    ' + str(self.rand))

        self.lock.release()
def subscribe():
    db = TinyDB('database.json')
    try:
        if 'email' in session:
            link = request.form.get('link')
            source = request.form.get('source')
            t = int(request.form.get('time'))
            name = request.form.get('name')
            if db.contains((where('name') == name)
                           & (where('email') == session['email'])
                           & (where('link') == link) & (where('time') == t)):
                return 'Notification already set before'
            if db.insert({
                    "type": "notification",
                    "name": name,
                    "email": session['email'],
                    "link": link,
                    "source": source,
                    "time": t
            }):
                return 'Notification set'
            else:
                return 'Some error occurred'
        else:
            return 'You must login to subscribe to notifications'
    except Exception:
        return 'Some error occurred'
Example #9
class TweetTests(TestCase):
    def setUp(self):
        self.db = TinyDB('testdb.json')
        self.dbQuery = Query()

    def test_should_validate_price(self):
        with self.assertRaises(PriceValidationError):
            tweet = Tweet(id=1, price=43)
            tweet = Tweet(id=1, price='abc1')

    def test_should_write_to_db(self):
        tweet = Tweet(id=1)
        tweet.write_to_db(self.db)
        self.assertTrue(self.db.contains(self.dbQuery.id == 1))

    def test_should_update_db_if_exists(self):
        tweet = Tweet(id=1)
        tweet.write_to_db(self.db)
        tweet.set_price(2)
        tweet.write_to_db(self.db)
        self.assertEqual(self.db.get(self.dbQuery.id == 1)['price'], 2)

    def test_should_not_insert_duplicate_ids(self):
        tweet1 = Tweet(id=1)
        tweet1.write_to_db(self.db)
        tweet2 = Tweet(id=1, price=1)
        tweet2.write_to_db(self.db)
        self.assertEqual(len(self.db.search(self.dbQuery.id == 1)), 1)

    def tearDown(self):
        self.db.close()
        if os.path.exists('testdb.json'):
            os.remove('testdb.json')
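
The Tweet class under test is not included. A sketch of a write_to_db that would satisfy these assertions, assuming the usual contains-then-update-else-insert pattern (a guess at the shape, not the original implementation):

from tinydb import Query

class Tweet:
    def __init__(self, id, price=None):
        # the real class raises PriceValidationError for invalid prices (not shown here)
        self.id = id
        self.price = price

    def set_price(self, price):
        self.price = price

    def write_to_db(self, db):
        q = Query()
        if db.contains(q.id == self.id):
            # update keeps a single document per id, as the duplicate test expects
            db.update({'price': self.price}, q.id == self.id)
        else:
            db.insert({'id': self.id, 'price': self.price})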
Example #10
def new_entry():
    print('-------------------------------------------------------------')
    print('Opening database for new entry...')
    name = input('Enter name of student-')
    nof = input("Enter name of student's father-")
    nom = input("Enter name of student's mother-")
    dob = input('Enter Date Of Birth of the student-')
    y = input('Enter Year of Birth...')
    bg = input("Enter student's Blood-group-")
    cl = input('Enter class in which the student is being enrolled in. -')
    ph = input('Enter phone no.-')
    doa = datetime.date(datetime.now())
    ids = customid()
    admn = entryno() + 1
    db = TinyDB(path)
    now = datetime.now()
    age = now.year - int(y)
    try:
        db.insert({'Admission no.': admn, 'Name': name, 'id': ids,
                   "Father's Name": nof, "Mother's Name": nom,
                   'Date of Birth': dob, 'Age': age, 'Blood Group': bg,
                   'Class': cl, 'Date of Admission': str(doa), 'Phone': ph})
        print('Added the student to database...')
    except Exception as e:
        print('A minor error occurred...')
        print(e)
    db2 = TinyDB(path2)
    m = Query()
    if not db2.contains(m['Admission no.'].exists()):
        db2.insert({'Admission no.': 0})
    else:
        docs = db2.search(m['Admission no.'].exists())
        for doc in docs:
            doc['Admission no.'] += 1
        db2.write_back(docs)
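
The counter bump at the end relies on write_back, which only exists in older TinyDB releases. On newer versions the same increment can be expressed with tinydb.operations.increment; a sketch under that assumption:

from tinydb import TinyDB, Query
from tinydb.operations import increment

db2 = TinyDB(path2)
m = Query()
if not db2.contains(m['Admission no.'].exists()):
    db2.insert({'Admission no.': 0})
else:
    # apply the increment operation to every matching document
    db2.update(increment('Admission no.'), m['Admission no.'].exists())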
Example #11
 async def verify(self, context, *, league_name: str = None):
     """$verify [league name]: Add league name, and confirms you agree to rules"""
     # if no name given, do nothing
     if league_name is None:
         return
     db = TinyDB(variables.db_location, default_table=variables.table_name)
     # if db contains user already do nothing
     if db.contains(self.Users.discord_id == context.message.author.id):
         db.close()
         return
     try:
         # if all above are false get mod role and verified role and the mod notif channel
         mod_notif_channel = discord.utils.get(
             context.message.server.channels,
             id=variables.club_invite_channel)
         verified_role = discord.utils.get(
             context.message.author.server.roles, name=variables.verified)
         mod_role = discord.utils.get(context.message.server.roles,
                                      name=variables.mod_role)
         lol_names_channel = discord.utils.get(
             context.message.server.channels,
             id=variables.verify_channel_id)
         # delete the ~verify command
         await self.bot.delete_message(context.message)
         # replace everything with the verified role
         await self.bot.replace_roles(context.message.author, verified_role)
         # pm's the welcome message
         fmt = '{0.mention} : {1}'
         await self.bot.send_message(
             lol_names_channel,
             fmt.format(context.message.author, league_name))
         # notify mods user needs an invite
         fmt = '{0.mention} needs a club invite, {1.mention}'
         await self.bot.send_message(
             mod_notif_channel, fmt.format(context.message.author,
                                           mod_role))
         # pm the user the welcome message
         try:
             await self.bot.send_message(context.message.author,
                                         variables.welcome_message)
         except Exception:
             pass
         # add user into db
         db.insert({
             'discord_id': context.message.author.id,
             'discord_name': str(context.message.author),
             'league_name': league_name
         })
         db.close()
         # Log
         logging.info(
             (str(context.message.author) + ' is verified').encode("utf-8"))
         logging.info((str(context.message.author) + ' == ' +
                       league_name).encode("utf-8"))
     except Exception as e:
         # if bot has error, log it
         logging.error(('[' + str(context.message.channel) + ']: ' +
                        str(e) + ' for verify').encode("utf-8"))
         db.close()
Example #12
class TinyDBStore(object):
    def __init__(self):
        self.drafts_db = TinyDB('event_drafts.json')
        self.events_db = TinyDB('events.json')

    # Drafts
    def contains_draft(self, user_id):
        return self.drafts_db.contains(Query().user_id == user_id)

    def new_draft(self, user_id):
        if self.contains_draft(user_id):
            self.drafts_db.remove(Query().user_id == user_id)

        self.drafts_db.insert({
            'user_id': user_id,
            'current_field': 0,
            'event': {}
        })

    def update_draft(self, user_id, event, current_field):
        self.drafts_db.update(
            {
                'user_id': user_id,
                'current_field': current_field,
                'event': event
            },
            Query().user_id == user_id)

    def get_draft(self, user_id):
        return self.drafts_db.get(Query().user_id == user_id)

    def remove_draft(self, user_id):
        self.drafts_db.remove(Query().user_id == user_id)

    # Events
    def insert_event(self, event):
        event_id = self.events_db.insert(event)
        event['id'] = event_id
        return event

    def update_event(self, event):
        self.events_db.update(event, eids=[event.eid])

    def remove_event(self, event):
        self.events_db.remove(eids=[event.eid])

    def get_events(self, user_id, name=None):
        if name:
            return self.events_db.search((
                (Query().user_id == user_id)
                & (Query().name.test(lambda v: name in v)))
                                         | (Query().invite == 'yes'))
        return self.events_db.search((Query().user_id == user_id)
                                     | (Query().invite == 'yes'))

    def get_event(self, event_id):
        return self.events_db.get(eid=int(event_id))
Example #13
 def persist(self, citizen):
     db = TinyDB(self.db_filename)
     query = Query()
     try:
         if not db.contains(query.name == citizen.name):
             db.insert(citizen.toDict())
             return True
         return False
     finally:
         # close the handle whether or not a new record was written
         db.close()
Example #14
def add_stars(top_poster):
    db = TinyDB('./db.json')
    User = Query()
    if top_poster is not None:
        for poster in top_poster[0]:
            if db.contains(User.uid == poster):
                db.update(increment('stars'), User.uid == poster)
            else:
                db.insert({'uid': poster, 'stars': 1})
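
add_stars assumes that TinyDB, Query and increment (from tinydb.operations) are imported at module level; a minimal call sketch with a made-up payload shaped to match the indexing above:

from tinydb import TinyDB, Query
from tinydb.operations import increment

# hypothetical input: the first element of top_poster is an iterable of user ids
add_stars([("u1", "u2")])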
Example #15
 def predicate(context):
     Users = Query()
     db = TinyDB(variables.db_location,
                 default_table=variables.table_name)
     if db.contains(Users.discord_id == context.message.author.id):
         db.close()
         return True
     db.close()
     return False
class PersistancePipeline(object):
    def __init__(self, tiny_db):
        self.tiny_db = tiny_db

    @classmethod
    def from_crawler(cls, crawler):
        return cls(tiny_db=crawler.settings.get('TINY_DB_FILE'))

    def open_spider(self, spider):
        self.db = TinyDB(self.tiny_db)

    def close_spider(self, spider):
        logging.info('TinyDB Size: %s', str(len(self.db)))

    def process_item(self, item, spider):
        collection = Query()
        #TODO add source and location to item DB key or the link url
        #TODO Check for price drops
        if self.db.contains(collection['title'] == item['title']):
            found_item = self.db.get(collection['title'] == item['title'])
            logging.debug('Found Item in DB: ' + found_item['title'])
            try:
                delta_time_last_seen = datetime.now() - dateutil.parser.parse(
                    found_item['scraped_date'])
                logging.info('item last seen %s days ago',
                             str(delta_time_last_seen.days))
                if delta_time_last_seen.days < 14:
                    logging.info("item seen recently: " + item['title'])
                    #drop the item as we've seen it recently
                    raise DropItem('recently seen item already in database: ' +
                                   item['title'])
                else:
                    logging.info(
                        'item not seen recently, updating item scraped date: '
                        + item['title'])
                    self.db.update({'scraped_date': str(datetime.now())},
                                   collection['title'] == item['title'])

            except KeyError:
                logging.info(
                    'scraped_date key does not exist, adding now as the scraped date: '
                    + item['title'])
                self.db.update({'scraped_date': str(datetime.now())},
                               collection['title'] == item['title'])
            except TypeError:
                logging.info(
                    'scraped_date is not a string or date, adding now as the scraped date:'
                    + item['title'])
                self.db.update({'scraped_date': str(datetime.now())},
                               collection['title'] == item['title'])

        #self.db.upsert(dict(item), collection.title == item['title'])
        else:
            logging.info('Added to DB: ' + item['title'])
            self.db.insert(dict(item))

        return item
Example #17
class DB:
    Lock = threading.Lock()

    def __init__(self, path: Optional[str] = None):
        if path is None:
            path = os.path.join(CONFIG['DEFAULT_CONFIG_DIR'], 'nauta_db.json')
        self._db = TinyDB(path,
                          storage=CachingMiddleware(JSONStorage),
                          indent=2)
        self._query = Query()

    def __contains__(self, key):
        return self._db.contains(self._query.username == key)

    def __iter__(self):
        for i in self._db:
            yield i

    def get(self, key: str, default=None):
        r = default
        with self.Lock:
            r = self._db.get(self._query.username == key)
        return r

    def get_by_alias(self, key: str, default=None):
        r = default
        with self.Lock:
            r = self._db.get(self._query.alias == key)
        return r

    def get_aliases(self):
        r = []
        with self.Lock:
            # combine the query conditions with &; Python's 'and' would keep only the second test
            r = self._db.search((self._query.alias != '')
                                & (self._query.alias != None))
        return r

    def set(self, key: str, data: dict):
        with self.Lock:
            self._db.update(data, self._query.username == key)

    def set_by_alias(self, key: str, data: dict):
        with self.Lock:
            self._db.update(data, self._query.alias == key)

    def insert(self, key: str, data: dict):
        with self.Lock:
            self._db.insert(data)

    def remove(self, key: str):
        with self.Lock:
            self._db.remove(self._query.username == key)

    def remove_by_alias(self, key: str):
        with self.Lock:
            self._db.remove(self._query.alias == key)
Example #18
class Plugin(object):
    def __init__(self, pm):
        self.pm = pm
        self.modulename = 'quotegen'
        self.configPath = 'data_config-{0}_a.json'.format(self.modulename)
        self.configDB = TinyDB(self.configPath) 

    @staticmethod
    def register_events():
        return [
        Events.Command("quotegen", Ranks.Default,
            "Generates a quote from "),
        Events.Command("quotegen.allow", Ranks.Admin),
        Events.Command("quotegen.block", Ranks.Admin)]

    async def handle_command(self, message_object, command, args):
        try:
            print("--{2}--\n[Noku-macro] {0} command from {1} by {3}".format(command, message_object.channel.name, arrow.now().format('MM-DD HH:mm:ss'), message_object.author.name))
        except Exception:
            print("[Noku]Cannot display data, probably emojis.")   

        if self.configDB.contains(Query().chanallow == message_object.channel.id):
            '''
            Add modules checks here
            '''
            if command == "quotegen":
                await self.generate_quote(message_object, args[1])

        #Do not modify or add anything below it's for permissions
        if command == "{0}.allow".format(self.modulename):
            await self.allowChan(message_object)
        if command == "{0}.block".format(self.modulename):
            await self.blockChan(message_object)


    '''
    Add modules here
    '''
    async def generate_quote(self, message_object, args):
        corpus = ""
        for log in TinyDB("{0}@{1}.json".format(message_object.server.name, message_object.server.id)).search(Query().channel == message_object.channel.id):
            if re.match(r"\w+", log["content"]):
                corpus = corpus + log['content'] + "\n"

        textmodel = markovify.NewlineText(corpus)
        sentence = textmodel.make_short_sentence(120)
        await self.pm.client.send_message(message_object.channel, '*{0} says...*\n```{1}```'.format(message_object.channel.name, sentence))
    #Do not modify or add anything below it's for permissions
    async def allowChan(self, message_object):
        self.configDB.insert({'chanallow': message_object.channel.id})
        await self.pm.client.send_message(message_object.channel, ':information_source:`Noku Bot-{1} has been allowed access to {0}`'.format(message_object.channel.name, self.modulename))

    async def blockChan(self, message_object):
        self.configDB.remove(Query().chanallow == message_object.channel.id)
        await self.pm.client.send_message(message_object.channel, ':information_source:`Noku Bot-{1} has been blocked access to {0}`'.format(message_object.channel.name, self.modulename))
Example #19
class Registration:
    def __init__(self):
        if not os.path.exists('data/'):
            os.makedirs('data')
        self.database = TinyDB('data/registrations.json')

    def register(self, chat, user):
        Chat = Query()
        if self.database.contains(Chat.id == chat.id):
            self.database.update(self.__register_in_chat(user.id),
                                 Chat.id == chat.id)
        else:
            chat_item = {'id': chat.id, 'users': {}}

            self.__register_in_chat(user.id)(chat_item)
            self.database.insert(chat_item)

    def get_registered_users_for_chat(self, chat):
        Chat = Query()
        chat = self.database.get(Chat.id == chat.id)
        return chat['users'] if chat else None

    def is_user_registered(self, chat, user):
        Chat = Query()
        chat = self.database.get(Chat.id == chat.id)
        return chat is not None and str(user.id) in chat['users']

    def reward_user(self, chat, user, name):
        Chat = Query()
        self.database.update(self.__reward(user, name), Chat.id == chat.id)

    def get_last_drawing_time(self, chat):
        Chat = Query()
        chat = self.database.get(Chat.id == chat.id)
        return chat[
            'last_drawing_time'] if chat and 'last_drawing_time' in chat else 0

    def get_last_winner(self, chat):
        Chat = Query()
        chat = self.database.get(Chat.id == chat.id)
        return chat['last_winner'] if chat and 'last_winner' in chat else ''

    def __reward(self, user, name):
        def transform(element):
            element['last_drawing_time'] = time()
            element['last_winner'] = name
            element['users'][str(user.id)]['wins'] += 1

        return transform

    def __register_in_chat(self, user_id):
        def transform(element):
            # use a string key so it matches is_user_registered() and the JSON round-trip
            element['users'][str(user_id)] = {'wins': 0}

        return transform
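
__register_in_chat and __reward return closures that TinyDB applies to the matched document; a standalone sketch of that callable-update pattern, with made-up names:

from tinydb import TinyDB, Query

db = TinyDB('demo_chats.json')
db.insert({'id': 1, 'users': {}})

def register(user_id):
    def transform(doc):
        doc['users'][str(user_id)] = {'wins': 0}
    return transform

# update() accepts a callable in place of a fields dict and mutates the matched document in place
db.update(register(42), Query().id == 1)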
Example #20
async def rank(ctx, *, user: discord.Member = None):
    """Shows user rank"""

    author = ctx.message.author
    server = ctx.message.server

    if not user:
        user = author

    logger.info('!rank %s', user.name)
    #print("!rank "+user.name)

    online = len([
        m.status for m in server.members if m.status != discord.Status.offline
    ])
    total_users = len(server.members)
    passed = (ctx.message.timestamp - server.created_at).days
    member_number = sorted(server.members,
                           key=lambda m: m.joined_at).index(user) + 1

    db_ranking = TinyDB('data/ranking-' + server.id + '.json',
                        sort_keys=True,
                        storage=serialization)
    rankings = db_ranking.all()
    #Rank = Query()
    #rankings = db_ranking.search(Rank.name != "")
    #print(len(rankings))
    sorted_rankings = sorted(rankings,
                             key=lambda x: float(x['points']),
                             reverse=True)

    Ranking = Query()
    if db_ranking.contains(Ranking.author == user.id):
        rank = db_ranking.get(Ranking.author == user.id)
        #print(rank)
        pos = sorted_rankings.index(rank) + 1
        #print(pos)
        level = get_level(user.joined_at)
        data = discord.Embed(title="Ranking",
                             description=user.name,
                             colour=user.colour)
        data.add_field(name="$SUN points", value=rank['points'])
        data.add_field(name="Total count", value=rank['count'])
        data.add_field(name="Rank", value=str(pos) + "/" + str(total_users))
        data.add_field(name="Level", value=str(level))
        data.set_footer(text="Member #{} | User ID:{}"
                        "".format(member_number, user.id))
        db_ranking.close()

        await client.say(embed=data)
    else:
        db_ranking.close()

        await client.say("Hey, you still have not been ranked")
Example #21
async def on_message(message):
    # Ignore messages from bots
    if message.author.bot:
        return
    time_since_epoch = time.time()
    litcoinlist = TinyDB("litcoin/LITCOIN" + message.server.id + ".json")
    levellist = TinyDB("level/LEVEL" + message.server.id + ".json")
    author = message.author.id

    if not levellist.contains(query["user"] == author):
        levellist.insert(
            {"user": author, "level": 1, "exp": 0, "exp_range_min": 15, "exp_range_max": 25, "time": time_since_epoch})
    exp = randint(levellist.search(query["user"] == author)[0]["exp_range_min"],
                  levellist.search(query["user"] == author)[0]["exp_range_max"])

    if not litcoinlist.contains(query["user"] == author):
        litcoinlist.insert({"user": author, "balance": 500, "time": time_since_epoch, "gain": 1})
    elif litcoinlist.search((query["user"] == author) & (query["time"] < time_since_epoch - 60)):
        litcoinlist.update(add("balance", (litcoinlist.search(query["user"] == author)[0]["gain"])),
                           query["user"] == author)

    if levellist.search(query["user"] == author)[0]["time"] < time_since_epoch - 60:
        levellist.update(add("exp", exp), query["user"] == author)
        levellist.update({"time": time_since_epoch}, query["user"] == author)
    if levelup(levellist.search(query["user"] == author)[0]["exp"],
               levellist.search(query["user"] == author)[0]["level"]):
        levellist.update(add("level", 1), query["user"] == author)
        levellist.update({"exp": 0}, query["user"] == author)
        levelup_message = "`Level_BETA`: " + message.author.name + " leveled up to level `" + str(
            levellist.search(query["user"] == author)[0]["level"]) + "`"
        litcoinlist.update({"gain": levellist.search(query["user"] == author)[0]["level"]}, query["user"] == author)
        levelup_message += ", their litcoin gain per message has increased to `" + str(
            litcoinlist.search(query["user"] == author)[0]["gain"]) + "`"
        litcoinlist.update(add("balance", round((levellist.search(query["user"] == author)[0]["level"] ** 1.2) * 100)),
                           query["user"] == author)
        levelup_message += " and `" + str(round((levellist.search(query["user"] == author)[0][
                                                     "level"] ** 1.2) * 100)) + "` LitCoins have been transfered to their account :moneybag:"
        levelup_message += " :clap::clap:"
        if not notifications_closed("level", message.server, message.channel):
            await hydroBot.send_message(message.channel, levelup_message)
    await hydroBot.process_commands(message)
Example #22
def entryno():
    print('-------------------------------------------------------------')
    db2 = TinyDB(path2)
    usr = Query()
    if not db2.contains(usr['Admission no.'].exists()):
        db2.insert({'Admission no.': 0})
        return 0
    else:
        docs = db2.search(usr['Admission no.'].exists())
        for doc in docs:
            m = doc['Admission no.']
        return m
Example #23
class ListCache(object):

    DB_FILE = 'listing_db.json'
    DB_TTL = timedelta(hours=12)

    def __init__(self):
        self.db = TinyDB(
            os.path.join(os.path.join(os.getcwd(), os.path.dirname(__file__)),
                         ListCache.DB_FILE))

    @property
    def db(self):
        return self._db

    @db.setter
    def db(self, db):
        self._db = db

    def listing_in_cache(self, listing):
        lquery = Query()
        return self.db.contains(lquery.hsh == listing.hsh)

    def retrieve_listing(self, listing):
        lquery = Query()
        list_dict = self.db.get(lquery.hsh == listing.hsh)
        return Listing.from_dict(list_dict)

    def insert_listing(self, listing):
        if self.listing_in_cache(listing):
            self.update_listing(listing)
        else:
            list_dict = listing.as_dict()
            list_dict['last_updated'] = datetime.now().isoformat()
            list_dict['hsh'] = listing.hsh
            self.db.insert(list_dict)

    def remove_listing(self, listing):
        lquery = Query()
        self.db.remove(lquery.hsh == listing.hsh)

    def update_listing(self, listing):
        lquery = Query()
        if self.listing_in_cache(listing):
            self.remove_listing(listing)
        self.insert_listing(listing)

    def remove_old_listings(self):
        list_ar = self.db.all()
        for listing in list_ar:
            if datetime.strptime(listing['last_updated'],
                                 '%Y-%m-%dT%H:%M:%S.%f'
                                 ) < datetime.now() - ListCache.DB_TTL:
                self.remove_listing(Listing.from_dict(listing))
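
ListCache assumes a Listing type exposing hsh, as_dict() and from_dict(); a minimal sketch under that assumption (field names are illustrative):

import hashlib
from dataclasses import dataclass, asdict

@dataclass
class Listing:
    title: str
    price: float
    url: str

    @property
    def hsh(self):
        # stable identifier derived from the listing URL
        return hashlib.md5(self.url.encode('utf-8')).hexdigest()

    def as_dict(self):
        return asdict(self)

    @classmethod
    def from_dict(cls, d):
        return cls(title=d['title'], price=d['price'], url=d['url'])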
def start_recording(product_name, product_link, source):
    db2 = TinyDB('currently_recording.json')
    if db2.contains(where('link') == product_link):
        return 'Price graph and predictions currently unavailable for this product.'
    print("product_link: %s" % product_link)
    if db2.insert({
            'name': product_name,
            'link': product_link,
            'source': source
    }):
        return 'Price graph and predictions currently unavailable for this product.'
    else:
        return 'Something went wrong.'
Example #25
def update_auction_item(key):
    items_db = TinyDB('items.json', indent=4, separators=(',', ': '))
    if items_db.contains(doc_id=key):
        item = items_db.get(doc_id=key)
        if item['auction_state'] != 'created':
            return jsonify(
                Acknowledgement_base(
                    False, "item has passed the state allowing modification").
                serialize())
        # find the item from db using key and update its properties
        items_db.update(request.json, doc_ids=[key])
        return jsonify(Item_Ack(True, key).serialize())
    return jsonify(Item_Ack(False, key).serialize())
Example #26
def remove_auction_item(key):
    items_db = TinyDB('items.json', indent=4, separators=(',', ': '))
    # find the item from db using key and delete the record from db
    if items_db.contains(doc_id=key):
        item = items_db.get(doc_id=key)
        if item['auction_state'] != 'created':
            return jsonify(
                Acknowledgement_base(
                    False,
                    "item has passed the state allowing removal").serialize())
        items_db.remove(doc_ids=[key])
        return jsonify(Item_Ack(True, key).serialize())
    return jsonify(Item_Ack(False, key).serialize())
Example #27
class TinyDBStore(object):
    def __init__(self):
        self.drafts_db = TinyDB('event_drafts.json')
        self.events_db = TinyDB('events.json')

    # Drafts
    def contains_draft(self, user_id):
        return self.drafts_db.contains(Query().user_id == user_id)

    def new_draft(self, user_id):
        if self.contains_draft(user_id):
            self.drafts_db.remove(Query().user_id == user_id)

        self.drafts_db.insert({
            'user_id': user_id,
            'current_field': 0,
            'event': {}
        })

    def update_draft(self, user_id, event, current_field):
        self.drafts_db.update({
            'user_id': user_id,
            'current_field': current_field,
            'event': event
        }, Query().user_id == user_id)

    def get_draft(self, user_id):
        return self.drafts_db.get(Query().user_id == user_id)

    def remove_draft(self, user_id):
        self.drafts_db.remove(Query().user_id == user_id)

    # Events
    def insert_event(self, event):
        event_id = self.events_db.insert(event)
        event['id'] = event_id
        return event

    def update_event(self, event):
        self.events_db.update(event, eids=[event.eid])

    def remove_event(self, event):
        self.events_db.remove(eids=[event['id']])

    def get_events(self, user_id, name=None):
        if name:
            return self.events_db.search((Query().user_id == user_id) & (Query().name.test(lambda v: name in v)))
        return self.events_db.search(Query().user_id == user_id)

    def get_event(self, event_id):
        return self.events_db.get(eid=int(event_id))
Example #28
async def update_data(user):
    db = TinyDB(path_db)
    usr = Query()
    if not db.contains(usr['ids'] == user.id):
        db.insert({
            'ids': user.id,
            'experience': 0,
            'level': 1,
            'credits': 0,
            'daily': 0,
            'hourly': 0,
            'rep': 0,
            'reptime': 0
        })
Example #29
class RulesDb:
    """
    Class managing a TinyDB instance for TT rules for a particular TT.
    """
    def __init__(self, tt_name: str):
        path = 'db/' + tt_name
        if not os.path.exists(path):
            os.mkdir(path)

        self.db = TinyDB(path + '/rules.json')

    def get_all_in_db(self) -> list:
        """
        :return: List of all records in DB.
        """
        return self.db.all()

    def add_rule(self, rule: dict):
        """
        Adds Rule to Rules DB overwriting one with the same id if present.
        :param rule: Rule to add.
        """
        doc_id = generate_rule_id(rule)

        if self.db.contains(doc_id=doc_id):
            self.db.remove(doc_ids=[doc_id])
        self.db.insert(table.Document(rule, doc_id=doc_id))

    def add_rule_if_not_present(self, rule: dict):
        """
        Adds Rule to Rules DB if one with the same id is NOT already present.
        :param rule: Rule to add.
        """
        doc_id = generate_rule_id(rule)

        if not self.db.contains(doc_id=doc_id):
            self.db.insert(table.Document(rule, doc_id=doc_id))
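
generate_rule_id is referenced above but not shown; one possible implementation that derives a stable positive integer doc_id from the rule's JSON form (an assumption, not the project's actual helper):

import hashlib
import json

def generate_rule_id(rule: dict) -> int:
    # hash the canonical JSON form so equal rules always map to the same doc_id
    digest = hashlib.sha1(json.dumps(rule, sort_keys=True).encode('utf-8')).hexdigest()
    return int(digest[:8], 16) + 1  # doc_ids must be positive integers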
class TinyDBGateway(AbstractJSONStorageGateway):
    def __init__(self, file_path: str, table_name: str = "_default") -> None:
        self.table = TinyDB(file_path).table(table_name)

    def create(self, data: dict, max_retries: int = 10) -> dict:
        with transaction(self.table) as tr:
            while max_retries > 0:
                uuid = uuid4()
                if not self.table.contains(where('uuid') == str(uuid)):
                    data.update(uuid=str(uuid))
                    tr.insert(data)
                    return data
                else:
                    max_retries -= 1
            raise StorageError('could not set unique UUID')

    def list_(self) -> list:
        return self.table.all()

    def retrieve(self, uuid: str) -> dict:
        record = self.table.get(where('uuid') == uuid)
        if record:
            return record
        else:
            raise NoResultFound('object does not exist')

    def update(self, uuid: str, data: dict):
        with transaction(self.table) as tr:
            record = self.table.get(where('uuid') == uuid)
            if record:
                tr.update(data, where('uuid') == uuid)
            else:
                raise NoResultFound('object does not exist')

    def delete(self, uuid: str):
        with transaction(self.table) as tr:
            record = self.table.get(where('uuid') == uuid)
            if record:
                tr.remove(where('uuid') == uuid)
            else:
                raise NoResultFound('object does not exist')

    def purge(self):
        self.table.purge()

    def search(self, conditions: dict):
        return self.table.search(
            reduce(lambda x, y: x & y,
                   [where(k) == v for k, v in conditions.items()]))
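
A short usage sketch of the search helper above: the conditions dict is folded into one conjunctive query, so the call below (hypothetical file and field names, and assuming the transaction helper imported by the original module is available) matches records where both fields equal the given values:

gw = TinyDBGateway('demo_gateway.json')
gw.create({'status': 'open', 'owner': 'bob'})
open_bobs = gw.search({'status': 'open', 'owner': 'bob'})  # (where('status') == 'open') & (where('owner') == 'bob')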
Example #31
class TinyDBComparison(RuleBasedStateMachine):
    '''
    TODO: make one without bundles since: "Note that currently preconditions can’t access 
    bundles; if you need to use preconditions, you should store relevant data on the instance instead."
    '''

    def __init__(self):
        super(TinyDBComparison, self).__init__()
        self.database = TinyDB(storage=MemoryStorage)  # sut, memory db makes sure states are reset
        self.model = defaultdict(set)

    ids = Bundle("ids")
    documents = Bundle("documents")


    @rule(target=documents, v=dictionaries(keys=integers(min_value=0), values=integers(min_value=0)))
    def add_value(self, v):
        '''Generate value to insert'''
        return v

    @rule(target=ids, k=ids, v=documents) # TODO: removing k from here results in "TypeError: unhashable type: 'dict'"
    def insert_value(self, k, v):
        d_id = self.database.insert(v)  # TinyDB calculates ID when inserting
        print(d_id, v)
        d_id = 6 # TODO: injected fault is not picked up...
        self.model[d_id].add(v)
        return d_id


    @rule(k=ids)
    def remove(self, k):
        self.model.pop(k)
        self.database.remove(doc_ids=[k])

   
    @rule()
    def get_all(self):
        assert len(self.model) == len(self.database.all())

    
    @rule(k=ids)
    def contains_agree(self, k):
        '''Property'''
        assert (self.model[k] is not None) and (self.database.contains(doc_id=k) is not False)

    @rule(k=ids)
    def values_agree(self, k):
        '''Property'''
        assert set(self.database.get(doc_id=k)) == self.model[k]
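
Hypothesis executes a RuleBasedStateMachine through the TestCase it generates; a one-line hook-up so a test runner will pick up the machine above:

# expose the state machine to pytest/unittest
TestTinyDBComparison = TinyDBComparison.TestCase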
Example #32
class ListCache(object):

    DB_FILE = 'listing_db.json'
    DB_TTL = timedelta(hours=12)

    def __init__(self):
        self.db = TinyDB(os.path.join(os.path.join(os.getcwd(), os.path.dirname(__file__)), ListCache.DB_FILE))

    @property
    def db(self):
        return self._db

    @db.setter
    def db(self, db):
        self._db = db

    def listing_in_cache(self, listing):
        lquery = Query()
        return self.db.contains(lquery.hsh == listing.hsh)

    def retrieve_listing(self, listing):
        lquery = Query()
        list_dict = self.db.get(lquery.hsh == listing.hsh)
        return Listing.from_dict(list_dict)

    def insert_listing(self, listing):
        if self.listing_in_cache(listing):
            self.update_listing(listing)
        else:
            list_dict = listing.as_dict()
            list_dict['last_updated'] = datetime.now().isoformat()
            list_dict['hsh'] = listing.hsh
            self.db.insert(list_dict)

    def remove_listing(self, listing):
        lquery = Query()
        self.db.remove(lquery.hsh == listing.hsh)

    def update_listing(self, listing):
        lquery = Query()
        if self.listing_in_cache(listing):
            self.remove_listing(listing)
        self.insert_listing(listing)

    def remove_old_listings(self):
        list_ar = self.db.all()
        for listing in list_ar:
            if datetime.strptime(listing['last_updated'], '%Y-%m-%dT%H:%M:%S.%f') < datetime.now() - ListCache.DB_TTL:
                self.remove_listing(Listing.from_dict(listing))
Example #33
class DataStore(object):
	def __init__(self, store_path):
		self.store_path = os.path.join(store_path,"META")
		try:
			os.makedirs(self.store_path)
		except OSError as exc:
			if exc.errno != errno.EEXIST:
				raise

		self.db = TinyDB(os.path.join(self.store_path,"__meta__.json"))

	def add_file(self, file_path):
		file_path = self._get_internal_path(file_path)
		self._add_to_db(file_path)

	def remove_file(self,file_path):
		file_path = self._get_internal_path(file_path)
		self._remove_from_db(file_path)
		
	def list_all(self):
		return self._list_all_db()
		
	def _init_file_list(self):
		with open(self.store_path,"r") as f:
			for line in tqdm(f):
				self.file_list.append(f)

	def _init_db(self):
		self.db = TinyDB(self.store_path)
		
	def _add_to_db(self,file_path):
		if not self.db.contains(where('file_path')== file_path):
			self.db.insert({'file_path':file_path})

	def _remove_from_db(self,file_path):
		self.db.remove(where('file_path') == file_path)
	
	def _list_all_db(self):
		return [rec['file_path'] for rec in self.db.all()]

	def _get_internal_path(self, path):
		return get_internal_path(path)
Example #34
def scrap(url):
    db = TinyDB(DB_FILENAME)
    response = requests.get(url)
    soup = BeautifulSoup(response.text, 'html.parser')
    # Remove promoted section
    soup.select_one('div.view.top-listings').extract()
    # Remove reply window
    soup.select_one('li.result.reply').extract()
    # Find all results
    results = []
    for li in soup.select('li.result'):
        results.append(_parse_result(li))
    # Skip those that are already there - single result found will break out
    # of the loop
    valid = []
    for result in results:
        if db.contains(_prepare_query(result)):
            break
        db.insert(_to_dict(result))
        valid.append(result)
    db.close()
    return valid
Example #35
class IconObjectDB(object):
    """The Class for inserting objects in the icon_object.json db using tinydb"""

    def __init__(self):
        super(IconObjectDB, self).__init__()
        try:
            self.db = TinyDB(
                os.path.abspath(
                    os.path.join(
                        os.path.dirname(__file__),
                        "..",
                        "..",
                        "..",
                        "EI",
                        "icons",
                        "icon_objects.json",
                    )
                )
            )
        except Exception as err:
            raise ValueError("Database is locked")

    def insert_object(self, obj, override=False):
        if not self.db.contains(where("name") == obj["name"]):
            print("Inserting: " + str(obj['name']))
            self.db.insert(obj)
        else:
            if self.warn_user(override):
                print("Overwriting: " + str(obj['name']))
                self.db.update(obj, where("name") == obj["name"])
            else:
                return False

    def get_values(self, value):
        obj_list = list()
        for obj in self.get_all():
            if value in obj:
                obj_list.append(obj[value])
        return obj_list

    def get_obj_by_mtl(self, mtl):
        if self.db.contains(where("mtl") == str(mtl)):
            return self.db.search(where("mtl") == str(mtl))[0]

    def get_obj_by_brush(self, brush):
        if self.db.contains(where("brush") == str(brush)):
            return self.db.search(where("brush") == str(brush))[0]

    def get_obj_by_brush_and_mtl(self, brush, mtl):
        if self.db.contains(
            (where("brush") == str(brush)) & (where("mtl") == str(mtl))
        ):
            return self.db.search(
                (where("brush") == str(brush)) & (where("mtl") == str(mtl))
            )[0]

    def get_objs_by_brush(self, brush):
        if self.db.contains(where("brush") == str(brush)):
            return self.db.search(where("brush") == str(brush))

    def get_obj_by_name(self, name):
        if self.db.contains(where("name") == name):
            return self.db.search(where("name") == name)[0]

    def get_all(self):
        return self.db.all()

    @staticmethod
    def warn_user(override):
        return override
Example #36
flist = filter(lambda file: os.path.isfile(os.path.join(config['share_dir'], 
                                                        file)), 
               os.listdir(config['share_dir']))
files = {byte_to_hex(md5((os.path.join(config['share_dir'], fname)))): fname 
         for fname in flist if fname != os.path.basename(config['database'])}

# remove records of which the file does not exist in share directory
for file in db.all():
    if file['md5'] in files:
        db.update({'name': files[file['md5']]}, File.md5==file['md5'])
    else:
        db.remove(File.md5==file['md5'])

# move files in share directory to upload directory if not recorded
for file in files:
    if not db.contains(File.md5==file):
        os.rename(os.path.join(config['share_dir'], files[file]),
                  os.path.join(config['upload_dir'], files[file]))

# record files in upload directory and move to share direcory
def upload(fname=None):
    if fname:
        if os.path.isfile(os.path.join(config['upload_dir'], fname)):
            file = {}
            fmd5 = md5(os.path.join(config['upload_dir'], fname))
            file['md5'] = byte_to_hex(fmd5)
            
            if db.contains(File.md5==file['md5']):
                os.remove(os.path.join(config['upload_dir'], fname))
                return db.get(File.md5==file['md5'])['route']
            
class GCodeRenderPlugin(octoprint.plugin.StartupPlugin, 
                        octoprint.plugin.SettingsPlugin,
                        octoprint.plugin.EventHandlerPlugin,
                        octoprint.plugin.BlueprintPlugin
):
    def initialize(self):
        # Because we use last modified, make sure we only get integers
        os.stat_float_times(False)

        # The actual render jobs
        self.renderJobs = Queue.Queue()

        # Prepare lock for render queue and database access
        self.dbLock = threading.Lock()

        self.preview_extension = "png"

        # Initialize tinydb
        self._prepareDatabase()

        # Cleanup the database and previews folder
        self.cleanup()

        # Begin watching for render jobs
        self._start_render_thread()

        # Fill the queue with any jobs we may have missed
        self._updateAllPreviews()

    def _prepareDatabase(self):
        self.dbLock.acquire()
        self.previews_database_path = os.path.join(self.get_plugin_data_folder(), "previews.json")
        self.previews_database = TinyDB(self.previews_database_path)
        self._previews_query = Query() # underscore for blueprintapi compatibility
        self.dbLock.release()
    
    def _updateAllPreviews(self, subFolder = None):
        """
        Reads the entire preview database, checks if there are any outdated previews (last modified of preview
        is before last modified of gcode file) and updates these.
        """ 
        current_folder = self._settings.global_get_basefolder('uploads')

        if subFolder:
            current_folder = os.path.join(current_folder, subFolder)

        self._logger.debug('Scanning folder {0} for render jobs'.format(current_folder))

        for entry in os.listdir(current_folder):
            entry_path = os.path.join(current_folder, entry)
            entry_rel_path = entry

            if subFolder:
                entry_rel_path = subFolder + '/' + entry

            if os.path.isfile(entry_path):
                file_type = octoprint.filemanager.get_file_type(entry_rel_path)
                if file_type:
                    if file_type[0] == "machinecode":
                        self._updatePreview(entry_path, entry_rel_path)
            else:
                self._updateAllPreviews(entry_rel_path)
   
    def _updatePreview(self, path, filename):
        """
        Checks if the preview is up to date with the gcode file (based on last modified) and re-renders if necessary.
        """
        self.dbLock.acquire()
        db_entry = self.previews_database.get(self._previews_query.path == path)
        self.dbLock.release()

        
        modtime = os.path.getmtime(path)
        if db_entry is None or db_entry["modtime"] != modtime or not os.path.exists(db_entry["previewPath"]):
            self.render_gcode(path, filename, modtime)

    def cleanup(self):
        """
        Loop through database, remove items not found in upload or preview folder
        """
        self.dbLock.acquire()
        db_entries = self.previews_database.all()
        for db_entry in db_entries:
            if not os.path.exists(db_entry["previewPath"]) or not os.path.exists(db_entry["path"]):
                self.previews_database.remove(eids=[db_entry.eid])
                self._logger.debug("Removed from preview database: %s" % db_entry["filename"])
        

        #Loop through images, remove items not found in db
        image_folder = self._get_image_folder()
        for entry in os.listdir(image_folder):
            entry_path = os.path.join(image_folder, entry)

            if entry_path.endswith(self.preview_extension) and \
                not self.previews_database.contains(self._previews_query.previewPath == entry_path):
                try:
                    os.remove(entry_path)
                    self._logger.debug("Removed preview %s" % entry_path)
                except Exception:
                    self._logger.debug("Could not remove preview %s" % entry_path)
        self.dbLock.release()

    def on_event(self, event, payload, *args, **kwargs):
        if event == Events.UPLOAD:
            if "path" in payload:
                gcodePath = os.path.join(self._settings.global_get_basefolder('uploads'), payload["path"])
                self.render_gcode(gcodePath, payload["name"])
            else:
                self._logger.debug("File uploaded, but no metadata found to create the gcode preview")

    def is_blueprint_protected(self):
        return False

    def get_settings_defaults(self):
        return dict(
            maxPreviewFileSize=52428800 # 50 MB
        )

    def render_gcode(self, path, filename, modtime = None):
        """
        Adds a render job to the render queue
        """
        if not os.path.exists(path):
            self._logger.debug("Could not find file to render: {0}".format(path))
            return

        if not modtime:
            modtime = os.path.getmtime(path)
        
        #TODO: Some error handling; or return a dummy preview
        maxFileSize = self._settings.get_int(["maxPreviewFileSize"])
        if maxFileSize > 0 and os.path.getsize(path) > maxFileSize:
            self._logger.warn("GCode file exceeds max preview file size: %s" % filename)
            return

        # Add the job to the render queue
        self.renderJobs.put({ "path": path, "filename": filename, "modtime": modtime})
        self._logger.debug("Render job enqueued: %s" % filename)
        

    @octoprint.plugin.BlueprintPlugin.route("/previewstatus/<path:filename>", methods=["GET"])
    def previewstatus(self, filename):
        """
        Allows to check whether a preview is available for a gcode file. 
        Query string arguments:
        filename: The gcode file to get the preview status for
        make: Whether or not to start rendering the preview, if there's no preview ready

        GET /previewstatus/<filename>
        """

        #TODO: Add support for other statuses, such as 'rendering failed', 'gcode too big', 'queued for rendering' etc
        
        if not filename:
            response = make_response('Invalid filename', 400)
        else:
            # First check in the database whether a preview is available
            self._logger.debug("Retrieving preview status for %s" % filename)
            self.dbLock.acquire()
            db_entry = self.previews_database.get(self._previews_query.filename == filename)
            self.dbLock.release()

            if not db_entry:
                response = make_response(jsonify({ 'status': 'notfound'}), 200)
            elif os.path.exists(db_entry["previewPath"]):
                response = make_response(jsonify({ 'status': 'ready', 'previewUrl' : db_entry["previewUrl"] }), 200)
            else:
                self._logger.debug("Preview file not found: %s" % db_entry["previewPath"])
                response = make_response(jsonify({ 'status': 'notfound'}), 200)

        return self._make_no_cache(response)

    @octoprint.plugin.BlueprintPlugin.route("/preview/<path:previewFilename>", methods=["GET"])
    def preview(self, previewFilename):
        """
        Retrieves a preview for a gcode file. Returns 404 if preview was not found
        GET /preview/file.gcode
        """
        if not previewFilename:
            response = make_response('Invalid filename', 400)
        else:
            self._logger.debug("Retrieving preview %s" % previewFilename)

            # Check the database for existing previews
            self.dbLock.acquire()
            db_entry = self.previews_database.get(self._previews_query.previewFilename == previewFilename)
            self.dbLock.release()

            # Return the preview file if it is found, otherwise 404
            if not db_entry or not os.path.exists(db_entry["previewPath"]):
                response = make_response('No preview ready', 404)
            else:
                response = send_file(db_entry["previewPath"])

        return response

    @octoprint.plugin.BlueprintPlugin.route("/allpreviews", methods=["GET"])
    def getAllPreviews(self):
        """
        Gets a list of all gcode files for which a preview is available. Useful for the initial
        display of a gcode file list, as it removes the need to call previewstatus once per file.
        """
        self.dbLock.acquire()
        db_entries = self.previews_database.all()
        self.dbLock.release()

        previews = []
        for db_entry in db_entries:
            if os.path.exists(db_entry["previewPath"]):
                previews.append({ "filename": db_entry["filename"], "previewUrl" : db_entry["previewUrl"] })

        response = make_response(jsonify({ "previews" : previews }))

        return self._make_no_cache(response)
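
    # For illustration (hypothetical data), a response from /allpreviews might look like:
    #
    #   {"previews": [{"filename": "benchy.gcode",
    #                  "previewUrl": "/plugin/gcoderender/preview/benchy_<modtime>.<ext>"}]}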
        

    def _start_render_thread(self):
        """"
        Start the daemon thread that watches the render job queue
        """
        t = threading.Thread(target=self._render_gcode_watch)
        t.setDaemon(True)
        t.start()
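
    # The render queue is a standard producer/consumer setup: render_gcode() puts jobs on a
    # Queue and the daemon thread started above consumes them one at a time. A minimal
    # standalone sketch of that pattern (the names below are illustrative only):
    #
    #   import Queue, threading          # 'queue' on Python 3
    #
    #   jobs = Queue.Queue()
    #
    #   def worker():
    #       while True:
    #           job = jobs.get()         # blocks until a job is available
    #           handle(job)              # hypothetical job handler
    #           jobs.task_done()
    #
    #   t = threading.Thread(target=worker)
    #   t.setDaemon(True)                # don't keep the process alive for this thread
    #   t.start()
    #   jobs.put({"path": "/tmp/file.gcode"})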
        
    def _initialize_parser(self):
        # Read throttling settings from OctoPrint
        throttling_duration = 0
        throttling_interval = 0

        # Skip throttling when we're debugging on Windows
        if sys.platform != "win32":
            # OctoPrint default 10ms
            default_throttle = self._settings.global_get_float(["gcodeAnalysis", "throttle_normalprio"])

            # Old OctoPrint versions don't have this setting. Default to 10ms
            if default_throttle is None: 
                default_throttle = 0.01

            throttling_duration = int(1000 * default_throttle)
            
            default_throttle_lines = self._settings.global_get_int(["gcodeAnalysis", "throttle_lines"])

            # Old OctoPrint versions don't have this setting. Default to 100 lines
            if default_throttle_lines is None:
                default_throttle_lines = 100

            # OctoPrint's default is 100 lines; multiply by 50 because the C parser crunches lines far more efficiently
            throttling_interval = 50 * default_throttle_lines
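
            # With OctoPrint's defaults (throttle_normalprio = 0.01 s, throttle_lines = 100)
            # this works out to throttling_duration = 10 ms and throttling_interval = 5000
            # lines, i.e. presumably a ~10 ms pause every 5000 parsed lines.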
        
        initialized = False

        try:
            initialized = gcodeparser.initialize(width=250, 
                                   height=250, 
                                   throttling_interval=throttling_interval, 
                                   throttling_duration=throttling_duration, 
                                   logger=self._logger)
        except Exception as e:
            self._logger.exception("Exception while initializing gcodeparser")
            return False

        if initialized:

            try:
                gcodeparser.set_print_area(x_min=-37, x_max=328, y_min=-33, y_max=317, z_min=0, z_max=205)
                gcodeparser.set_camera(target="part", distance=(-300, -300, 150))
                gcodeparser.set_background_color((1.0, 1.0, 1.0, 1.0))
                gcodeparser.set_bed_color((0.75, 0.75, 0.75, 1.0))
                gcodeparser.set_part_color((67.0 / 255.0, 74.0 / 255.0, 84.0 / 255.0, 1.0))

            except Exception as e:
                self._logger.exception("Exception while configuring gcodeparser")
                return False

            return True

        return False

    def _render_gcode_watch(self):
        """"
        The actual rendering thread. Monitors the render queue, and initiates the render job.
        """

        # It is important we initialize the gcoderender on this thread (for the drawing context)
        initialized = self._initialize_parser()

        if not initialized:
            self._logger.error("Couldn't initialize gcodeparser")
            return

        while True:
            job = self.renderJobs.get() # Will block until a job becomes available
            self._logger.debug("Job found: {0}".format(job['filename']))
            t0 = time.time()
            self._render_gcode_worker(job['path'], job['filename'], job['modtime'])
            t1 = time.time()
            self._logger.info("Rendered preview for {filename} in {t:0.0f} s".format(filename=job['filename'], t=(t1-t0)))
            self.renderJobs.task_done()

    def _render_gcode_worker(self, path, filename, modtime):
        """
        Renders a preview for a gcode file and inserts a record into the preview database.
        """
        if not octoprint.filemanager.valid_file_type(path, type="gcode"):
            self._logger.debug('Not a valid file type: %s' % path)
            return

        if not os.path.exists(path):
            self._logger.debug('File doesn\'t exist: %s' % path)
            return 

        if filename.startswith("."): #TODO: Perform a more comprehensive hidden file check
            self._logger.debug('Hidden file: %s' % path)
            return

        # Notify the client about the render
        self._send_client_message("gcode_preview_rendering", { 
                                            "filename":  filename
                                            })

        # Get a filename for the preview. By including modtime, the previews may be cached by the browser
        imageDest = self._get_imagepath(filename, modtime)
       
        self._logger.debug("Image path: {}".format(imageDest["path"]))
       
        # This is where the magic happens
        self._logger.debug("Begin rendering");
        returncode = 1
        try:
            success = gcodeparser.render_gcode(path, imageDest["path"])
        except Exception as e:
            self._logger.debug("Error in Gcodeparser: %s" % e.message)

        if success:
            # Rendering succeeded
            self._logger.debug("Render complete: %s" % filename)
            url = '/plugin/gcoderender/preview/%s' % imageDest["filename"]
        else:
            # Rendering failed.
            # TODO: set url and path to a failed-preview-image
            self._logger.warn("Render failed: %s" % filename)
            return

        # Query the database for any existing record of the gcode file,
        # then update or insert the record accordingly
        self.dbLock.acquire()
        db_entry = self.previews_database.get(self._previews_query.path == path)
      
        if not db_entry:
            self.previews_database.insert({ 
                    "filename" : filename, 
                    "path": path, 
                    "modtime" : modtime, 
                    "previewUrl" : url,
                    "previewFilename" : imageDest["filename"],
                    "previewPath" : imageDest["path"]
                })
        else:
            try:
                os.remove(db_entry["previewPath"])
            except Exception:
                self._logger.debug("Could not delete preview %s" % db_entry["previewPath"])

            self.previews_database.update({
                    "modtime": modtime,
                    "previewUrl": url,
                    "previewPath": imageDest["path"],
                    "previewFilename": imageDest["filename"]
                }, self._previews_query.path == path)
         
        self.dbLock.release()  

        # Notify client the preview is ready
        self._send_client_message("gcode_preview_ready", { 
                                                            "filename":  filename,
                                                            "previewUrl": url
                                                            })
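
        # Note on the database code above: the get()-then-insert()/update() sequence could
        # also be written with TinyDB's upsert() (TinyDB 3.6 and later). A minimal sketch
        # using the same field names:
        #
        #   self.previews_database.upsert({
        #           "filename": filename,
        #           "path": path,
        #           "modtime": modtime,
        #           "previewUrl": url,
        #           "previewFilename": imageDest["filename"],
        #           "previewPath": imageDest["path"]
        #       }, self._previews_query.path == path)
        #
        # The explicit branch is kept here because it also deletes the stale preview image file.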

   

    def _make_no_cache(self, response):
        """
        Helper method to set no-cache headers. Not used anymore, as including modtime in filename allows browser caching
        """
        response.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, post-check=0, pre-check=0, max-age=0"
        response.headers["Pragma"] = "no-cache"
        response.headers["Expires"] = "-1"
        return response

    def _get_image_folder(self):
        """
        Gets the folder to save the previews to
        """
        return self._settings.get_plugin_data_folder()

    def _get_imagepath(self, filename, modtime = None):
        """
        Creates a filename for the preview. Returns both filename and (full) path
        """

        # Strip any subfolders and find the basename of the file
        _, tail = os.path.split(filename)
        name, _ = os.path.splitext(tail)

        images_folder = self._get_image_folder()
        
        if not modtime:
            # Fall back to the current time if no modification time was given
            modtime = time.time()

        new_filename = "{0}_{1}.{2}".format(name, modtime, self.preview_extension)

        image_path = os.path.join(images_folder, new_filename)

        return dict(path = image_path, filename = new_filename)
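
    # For example (hypothetical values): filename "folder/benchy.gcode" with modtime 1502459120.0
    # becomes "benchy_1502459120.0.<preview_extension>" inside the plugin data folder, so each
    # rendered preview gets a unique, browser-cacheable name per modification time.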
    
    def _send_client_message(self, message_type, data=None):
        """
        Notify the client
        """
        self._logger.debug("Sending client message with type: {type}, and data: {data}".format(type=message_type, data=data))
        self._plugin_manager.send_plugin_message(self._identifier, dict(type=message_type, data=data))
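

# The plugin above serializes every TinyDB call with explicit dbLock.acquire()/release()
# pairs. Below is a minimal standalone sketch of the same idea (the names are illustrative,
# not part of the plugin), using the lock as a context manager so it is released even if
# the database call raises:

import threading

from tinydb import TinyDB, Query

preview_db = TinyDB("previews.json")
preview_db_lock = threading.Lock()
PreviewQuery = Query()


def get_preview_entry(filename):
    """Thread-safe lookup of a preview record by gcode filename."""
    with preview_db_lock:
        return preview_db.get(PreviewQuery.filename == filename)


def upsert_preview_entry(entry):
    """Thread-safe insert-or-update keyed on the gcode file path."""
    with preview_db_lock:
        preview_db.upsert(entry, PreviewQuery.path == entry["path"])
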
def walk_and_write_to_db(db: TinyDB):
    """Retrieves paths of all files from server and saves new to database."""
    paths = get_filepaths_on_server()
    for path in paths:
        if not db.contains(where('path') == path):
            db.insert({'path': path, 'datetime': datetime.now()})
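
# A hedged usage sketch for walk_and_write_to_db (the names below are illustrative). Note that
# storing datetime.now() directly requires a serialization-aware TinyDB storage; with the
# default JSON storage the value would have to be converted first (e.g. .isoformat()):
#
#   from datetime import datetime
#   from tinydb import TinyDB, where
#
#   def get_filepaths_on_server():          # stand-in for the real implementation
#       return ["/data/a.gcode", "/data/b.gcode"]
#
#   db = TinyDB("paths.json")
#   walk_and_write_to_db(db)
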
Example #39
0
class JobDB:
    """Keeps a database of jobs, with a MD5 hash that encodes the function
    name, version, and all arguments to the function.
    """
    def __init__(self, path):
        self.db = TinyDB(path)
        self.lock = Lock()

    def get_result_or_attach(self, key, prov, running):
        job = Query()
        with self.lock:
            rec = self.db.get(job.prov == prov)

            if 'result' in rec:
                return 'retrieved', rec['key'], rec['result']

            job_running = rec['key'] in running
            wf_running = rec['link'] in running.workflows

            if job_running or wf_running:
                self.db.update(attach_job(key), job.prov == prov)
                return 'attached', rec['key'], None

            print("WARNING: unfinished job in database. Removing it and "
                  " rerunning.", file=sys.stderr)
            self.db.remove(eids=[rec.eid])
            return 'broken', None, None

    def job_exists(self, prov):
        job = Query()
        with self.lock:
            return self.db.contains(job.prov == prov)

    def store_result(self, key, result):
        job = Query()
        with self.lock:
            if not self.db.contains(job.key == key):
                return

        self.add_time_stamp(key, 'done')
        with self.lock:
            self.db.update(
                    {'result': result, 'link': None},
                    job.key == key)
            rec = self.db.get(job.key == key)
            return rec['attached']

    def new_job(self, key, prov, job_msg):
        with self.lock:
            self.db.insert({
                'key': key,
                'attached': [],
                'prov': prov,
                'link': None,
                'time': {'schedule': time_stamp()},
                'version': job_msg['data']['hints'].get('version'),
                'function': job_msg['data']['function'],
                'arguments': job_msg['data']['arguments']
            })

        return key, prov

    def add_link(self, key, ppn):
        job = Query()
        with self.lock:
            self.db.update({'link': ppn}, job.key == key)

    def get_linked_jobs(self, ppn):
        job = Query()
        with self.lock:
            rec = self.db.search(job.link == ppn)
            return [r['key'] for r in rec]

    def add_time_stamp(self, key, name):
        def update(r):
            r['time'][name] = time_stamp()

        job = Query()
        with self.lock:
            self.db.update(
                update,
                job.key == key)
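
# A hedged usage sketch for JobDB (the keys, provenance strings and the `running` object are
# hypothetical; time_stamp() and attach_job() are module helpers that are not shown here):
#
#   jobs = JobDB("jobs.json")
#
#   if not jobs.job_exists("prov-abc123"):
#       jobs.new_job(key="job-1", prov="prov-abc123",
#                    job_msg={"data": {"hints": {}, "function": "f", "arguments": [1, 2]}})
#
#   jobs.store_result("job-1", result=3)
#   status, key, result = jobs.get_result_or_attach("job-2", "prov-abc123", running=None)
#   # -> ('retrieved', 'job-1', 3), since a result for that provenance is already stored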