def listen(self, database, cacheproxy):
    """Listen to the CouchDB changes feed and keep the cache tree in sync.

    Runs indefinitely over a continuous feed started at the current
    sequence number: deletions remove tracked documents from the tree,
    new file/folder documents are added, existing ones are updated.

    TODO: use a filter (cache/all) instead of inspecting every change.
    """
    # NOTE(review): view initialisation is hard-coded to 'laptop' while
    # everything else uses `database` -- confirm this is intentional.
    dbutils.init_database_views('laptop')
    self.cacheproxy = cacheproxy
    self.cache = cacheproxy[0]
    db = dbutils.get_db(database)
    # Fetch the current sequence number first so the continuous feed
    # only reports changes that happen after startup.
    sequences = db.changes(include_docs=False, feed='normal')
    all_changes = db.changes(feed='continuous',
                             since=sequences['last_seq'],
                             heartbeat='1000',
                             include_docs=True)
    for line in all_changes:
        logger.info(line)
        if self._is_deleted(line):
            logger.info('_is_deleted')
            # Only purge documents the cache actually tracks.
            if line['doc']['_id'] in self.cache['path_id']:
                self.delete_document(line['doc'])
        elif self._is_file(line) or self._is_folder(line):
            doc = line['doc']
            if self._is_new(line):
                # New documents take the same insertion path whether
                # they are files or folders (was duplicated per branch).
                self.add_document(doc)
            elif self._is_file(line):
                self.update_file(doc)
            else:
                self.update_folder(doc)
def get_binary_progression(database):
    """Return the fraction of files whose binary has been downloaded.

    Returns 1 when the database holds no file documents (nothing to
    download), otherwise len(binaries) / len(files) as a float.
    """
    db = dbutils.get_db(database)
    files = db.view("file/all")
    binaries = db.view('binary/all')
    # Bug fix: `len(files) is 0` compared identity, not value -- it only
    # worked because CPython interns small ints. Use == (also guards the
    # division below against a zero denominator).
    if len(files) == 0:
        return 1
    return len(binaries) / float(len(files))
def get_binary_progression(database): ''' Recover progression of binary downloads. ''' print "recover progression for %s" % database db = dbutils.get_db(database) if db is not None: files = db.view("file/all") binaries = db.view('binary/all') if len(files) is 0: return 1 else: return len(binaries)/float(len(files)) else: return 0
def __init__(self, name, device_config_path, remote_url, device_mount_path):
    """Keep the device parameters needed for caching and make sure the
    on-disk cache directory exists."""
    self.name = name
    self.device_config_path = device_config_path
    self.remote_url = remote_url
    self.device_mount_path = device_mount_path
    # The binary cache lives inside the device configuration directory.
    cache_dir = os.path.join(device_config_path, 'cache')
    self.cache_path = cache_dir
    self.db = dbutils.get_db(self.name)
    self.metadata_cache = cache.Cache()
    # First run: the cache directory does not exist yet, create it.
    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)
def __init__(self, database):
    """Build the shared cache structures and start the child process
    that listens to CouchDB changes to keep them up to date.
    """
    self.db = dbutils.get_db(database)
    # The cache is shared with the listener process through a manager
    # proxy list holding a single dict; writes must go back through
    # cacheproxy[0] for the proxy to pick them up.
    cacheproxy = manager.list()
    cacheproxy.append({})
    self.cacheproxy = cacheproxy
    self.cache = cacheproxy[0]
    self.cache['tree'] = {}
    self.cache['path_id'] = {}
    self.cache['binaries'] = {}
    self.cache['st'] = {}
    self.cacheproxy[0] = self.cache
    # (Removed a large block of commented-out per-field proxy code that
    # this single shared cache dict superseded.)
    # Populate the cache from the database, starting at the root path.
    self.init_variables("")
    # Keep the cache in sync with CouchDB from a separate process.
    listener = Process(target=self.listen, args=[database, cacheproxy])
    listener.start()
sys.setdefaultencoding('utf8') # Get current timestamp ts = time.time() st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S') # Get new listings req = requests.get('http://bccondos.net/821-cambie') soup = BeautifulSoup(req.content, 'html.parser') table = soup.find_all('table')[4] new_listings = read_table(table) new_listings.date = st print new_listings # Get most recently stored listings from db db = dbutils.get_db('./listings.db') try: prev_active = pd.read_sql_query("select * from active_listings", db) except BaseException: new_listings.to_sql('active_listings', db, if_exists='replace') dbutils.make_query(db, "drop table if exists 'past_listings'") sys.exit(0) if 'index' in prev_active.columns: del prev_active['index'] try: is_new = not (new_listings == prev_active).all().all() except ValueError: is_new = True