def put(self, site, rule, context, source_files=None):
    """Shelve *context* under the (site, rule) key.

    Any source-file list already stored for this key is merged
    (deduplicated and sorted) with the caller-supplied one before being
    written back.

    Args:
        site: Site identifier; first half of the lookup key.
        rule: Rule identifier; second half of the lookup key.
        context: Arbitrary picklable object to store.
        source_files: Optional list of source file paths associated
            with the context. Defaults to ``[None]`` (sentinel entry)
            when omitted.
    """
    if source_files is None:
        source_files = [None]
    with self.connection() as db:
        # Preserve existing source files: merge what is already stored
        # for this (site, rule) with what the caller supplied.
        cursor = db.execute(
            'SELECT source_files '
            'FROM contexts '
            'WHERE site = ? AND rule = ?;',
            (site, rule))
        row = cursor.fetchone()
        if row:
            # bytes() normalizes the sqlite BLOB to what pickle.loads
            # accepts on both Python 2 (buffer -> str) and Python 3
            # (bytes -> bytes); str() would corrupt the payload on 3.
            existing_source_files = pickle.loads(bytes(row[0]))
            # Build a new list rather than using += so a caller-supplied
            # list is never mutated in place.
            source_files = source_files + existing_source_files
        source_files = sorted(set(source_files))
        # Check to see if the context is already shelved.
        cursor = db.execute(
            'SELECT id FROM contexts '
            'WHERE site = ? AND rule = ?;',
            (site, rule))
        serialized_context = pickle.dumps(context, HIGHEST_PROTOCOL)
        serialized_source_files = pickle.dumps(
            source_files, HIGHEST_PROTOCOL)
        # Optimize pickle size, and conform it to sqlite's BLOB type.
        serialized_context = blobify(
            pickletools.optimize(serialized_context))
        serialized_source_files = blobify(
            pickletools.optimize(serialized_source_files))
        # NOTE(review): SELECT-then-INSERT/UPDATE is racy across
        # processes; acceptable only if callers serialize access.
        if cursor.fetchone() is None:
            db.execute(
                'INSERT INTO contexts '
                '(site, rule, context, source_files) VALUES (?, ?, ?, ?);',
                (site, rule, serialized_context, serialized_source_files))
        else:
            db.execute(
                'UPDATE contexts '
                'SET context = ?, '
                '    source_files = ? '
                'WHERE site = ? AND rule = ?;',
                (serialized_context, serialized_source_files, site, rule))
        db.commit()
def put(self, site, rule, context, source_files=None):
    """Shelve *context* for (site, rule), merging previously stored
    source files with the caller's list.

    Args:
        site: Site identifier (lookup key, part 1).
        rule: Rule identifier (lookup key, part 2).
        context: Picklable object to persist.
        source_files: Optional list of associated source file paths;
            a ``[None]`` sentinel list is used when omitted.
    """
    if source_files is None:
        source_files = [None]
    with self.connection() as db:
        # Preserve existing source files for this key.
        cursor = db.execute(
            'SELECT source_files '
            'FROM contexts '
            'WHERE site = ? AND rule = ?;',
            (site, rule))
        existing_row = cursor.fetchone()
        if existing_row:
            # bytes() works for both Python 2 buffers and Python 3
            # bytes; the previous str() call would mangle the pickled
            # BLOB under Python 3.
            stored = pickle.loads(bytes(existing_row[0]))
            # Concatenate into a fresh list so we never mutate a list
            # the caller still holds a reference to (+= would).
            source_files = source_files + stored
        source_files = sorted(set(source_files))
        # Check to see if the context is already shelved.
        cursor = db.execute(
            'SELECT id FROM contexts '
            'WHERE site = ? AND rule = ?;',
            (site, rule))
        serialized_context = pickle.dumps(context, HIGHEST_PROTOCOL)
        serialized_source_files = pickle.dumps(
            source_files, HIGHEST_PROTOCOL)
        # Optimize pickle size, and conform it to sqlite's BLOB type.
        serialized_context = blobify(
            pickletools.optimize(serialized_context))
        serialized_source_files = blobify(
            pickletools.optimize(serialized_source_files))
        if cursor.fetchone() is None:
            db.execute(
                'INSERT INTO contexts '
                '(site, rule, context, source_files) VALUES (?, ?, ?, ?);',
                (site, rule, serialized_context, serialized_source_files))
        else:
            db.execute(
                'UPDATE contexts '
                'SET context = ?, '
                '    source_files = ? '
                'WHERE site = ? AND rule = ?;',
                (serialized_context, serialized_source_files, site, rule))
        db.commit()
def put(self, site, rule, context):
    """Store *context* under the (site, rule) key, replacing any
    context already shelved for that key.

    Args:
        site: Site identifier (lookup key, part 1).
        rule: Rule identifier (lookup key, part 2).
        context: Picklable object to persist.
    """
    with self.connection() as db:
        # Is this context already shelved?
        row = db.execute(
            'SELECT id FROM contexts '
            'WHERE site = ? AND rule = ?;',
            (site, rule)).fetchone()
        # Serialize, shrink the pickle, and adapt it to sqlite's BLOB
        # type in one pass.
        payload = blobify(
            pickletools.optimize(pickle.dumps(context, HIGHEST_PROTOCOL)))
        if row is None:
            db.execute(
                'INSERT INTO contexts '
                '(site, rule, context) VALUES (?, ?, ?);',
                (site, rule, payload))
        else:
            db.execute(
                'UPDATE contexts '
                'SET context = ? '
                'WHERE site = ? AND rule = ?;',
                (payload, site, rule))
        db.commit()