def get_preview(self, files, append):
    """Request an upload preview for *files* from the API.

    Posts file names/sizes to the ``/files/upload/preview`` endpoint and
    returns a mapping of local file path -> ``importId`` assigned by the
    server. Any per-package warnings returned by the API are logged.
    """
    # each file is identified back to us by its positional uploadId
    file_entries = [
        {
            "fileName": os.path.basename(path),
            "size": os.path.getsize(path),
            "uploadId": idx,
        }
        for idx, path in enumerate(files)
    ]
    response = self._post(
        endpoint=self._uri('/files/upload/preview'),
        params=dict(append=append),
        json={"files": file_entries},
    )
    mapping = dict()
    for package in response.get("packages", list()):
        import_id = package.get("importId")
        for warning in package.get("warnings", list()):
            log.warn("API warning: {}".format(warning))
        for entry in package.get("files", list()):
            # resolve uploadId back to the original local path
            mapping[files[entry.get("uploadId")]] = import_id
    return mapping
def init_settings_table(self, con):
    """Ensure the ``settings`` table exists and reconcile the page size.

    If the table is missing it is created and seeded with the current
    settings. If it exists, an old (pre-``ts_format``) cache is detected
    and cleared, and ``self.page_size`` is taken from the stored value
    (warning if it disagrees with the user-specified setting).
    """
    # check for settings table
    q = "SELECT name FROM sqlite_master WHERE type='table' AND name='settings'"
    r = con.execute(q)
    if r.fetchone() is None:
        log.info('Cache - Creating \'settings\' table')
        # create settings table
        q = """
            CREATE TABLE settings (
                ts_page_size INTEGER NOT NULL,
                ts_format CHAR(50) NOT NULL,
                max_bytes INTEGER NOT NULL,
                modified DATETIME)
        """
        con.execute(q)
        # insert settings values; use parameterized placeholders instead of
        # string interpolation so quoting is handled by sqlite3 itself
        con.execute(
            "INSERT INTO settings VALUES (?, ?, ?, ?)",
            (self.page_size,
             'PROTOBUF',
             self.settings.cache_max_size,
             datetime.now().isoformat()))
    else:
        # settings table exists
        # 1. check for ts_format field (not there indicating old cache)
        result = con.execute("PRAGMA table_info('settings');").fetchall()
        fields = list(zip(*result))[1]
        if 'ts_format' not in fields:
            # this means they used an older client to initialize the cache, and because
            # we switched the serialization format, we'll need to refresh it.
            log.warn('Deprecated cache format detected - clearing & reinitializing cache...')
            self.clear()

        # 2. check page size
        result = con.execute("SELECT ts_page_size FROM settings").fetchone()
        if result is not None:
            # page size entry exists; the stored value wins over the
            # user-specified setting so existing pages stay readable
            self.page_size = result[0]
            if self.settings.ts_page_size != self.page_size:
                log.warn('Using existing page_size={} from DB settings (user specified page_size={})' \
                        .format(self.page_size, self.settings.ts_page_size))
        else:
            # somehow, there is no page size entry
            self.page_size = self.settings.ts_page_size
def clear(self):
    """Wipe the entire cache: index entries, index file, and page files.

    Closes the open index connection (if any), removes the on-disk index
    and page directory, then re-creates an empty cache via ``init_dir``
    and ``init_tables``.
    """
    import shutil
    if self._conn is not None:
        with self.index_con as con:
            # remove page entries
            con.execute('DELETE FROM ts_pages;')
            con.commit()
        self._conn.close()
        self._conn = None
    try:
        # delete index file
        os.remove(self.index_loc)
    except OSError:
        # best-effort removal: narrow the bare `except:` to filesystem
        # errors so real bugs (NameError, KeyboardInterrupt) still surface
        log.warn('Could not delete index file: {}'.format(self.index_loc))
    # remove remaining page files, ignoring errors on partial trees
    shutil.rmtree(self.dir, ignore_errors=True)
    # reset
    self.init_dir()
    self.init_tables()
def get_page_data(self, channel, page):
    """Return the cached series for (channel, page).

    Returns ``None`` when the page is not in the cache (or its file has
    gone missing), an empty datetime-indexed Series when the page is
    cached as empty, otherwise the deserialized series from disk.
    """
    has_data = self.page_has_data(channel, page)
    if has_data is None:
        # page not present in cache
        return None
    elif not has_data:
        # page is empty; pd.core.index was removed in pandas 1.0 — use the
        # public pd.DatetimeIndex. dtype pinned to float64 to match the
        # historical default for empty Series.
        return pd.Series([], index=pd.DatetimeIndex([]), dtype='float64')

    # page has data, let's get it
    filename = self.page_file(channel.id, page, make_dir=True)
    if os.path.exists(filename):
        # get page data from file
        with open(filename, 'rb') as f:
            series = read_segment(channel, f.read())
        # update access count
        self.update_page(channel, page, has_data)
        return series
    else:
        # page file has been deleted recently?
        log.warn('Page file not found: {}'.format(filename))
        return None
def set_page_data(self, channel, page, data, update=False):
    """Persist one page of channel data and record it in the page index.

    When *data* is non-empty the series is serialized and written to the
    page file; the index entry is then added (or updated when *update* is
    True). Index-DB failures reset the connection; duplicate page entries
    are ignored.
    """
    has_data = data is not None and len(data) > 0

    if has_data:
        # serialize the series to its on-disk segment representation
        target = self.page_file(channel.id, page, make_dir=True)
        segment = create_segment(channel=channel, series=data)
        with open(target, 'wb') as fh:
            fh.write(segment.SerializeToString())
        self.page_written()

    try:
        if update:
            # modifying an existing page entry
            self.update_page(channel, page, has_data)
        else:
            # adding a new page entry
            self.set_page(channel, page, has_data)
    except sqlite3.OperationalError:
        log.warn('Indexing DB inaccessible, resetting connection.')
        if self._conn is not None:
            self._conn.close()
            self._conn = None
    except sqlite3.IntegrityError:
        # page already exists - ignore
        pass