def getcolumns(stream):
    """Function that returns the columns of a SELECT query"""
    pipe = Pipeline()
    pipe.append(ColumnsSelect())
    return pipe(stream)
def compact(stream):
    """Function that returns a compacted version of the stream"""
    pipe = Pipeline()
    pipe.append(StripComments())
    # Instantiate the filter, matching the other pipeline stages
    pipe.append(StripWhitespace())
    return pipe(stream)
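# Usage sketch (hypothetical demo helper, not part of the original module): it
# assumes the `lexer.tokenize` entry point shown in the get_create_table_info
# doctest below, and that ColumnsSelect reports aliased names as in the tests
# at the end of this file. The exact element types the filters yield may vary.
def _demo_getcolumns_and_compact():
    import lexer

    sql = 'SELECT a, b AS c FROM t  -- a trailing comment'
    # ColumnsSelect reports the alias for 'b AS c', so 'c' is expected here.
    print(getcolumns(lexer.tokenize(sql)))   # e.g. [u'a', u'c']
    # compact() strips the comment and the redundant whitespace.
    print(compact(lexer.tokenize(sql)))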
def getlimit(stream):
    """Function that returns the LIMIT of an input SQL query"""
    pipe = Pipeline()
    pipe.append(Limit())
    result = pipe(stream)
    try:
        return int(result)
    except ValueError:
        return result
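# Usage sketch (hypothetical `_demo_getlimit` helper, for illustration only):
# a numeric LIMIT comes back as an int via the int() conversion above; for a
# non-numeric limit (e.g. a bound parameter) the raw result of the Limit
# filter is assumed to be returned unchanged.
def _demo_getlimit():
    import lexer

    print(getlimit(lexer.tokenize('SELECT * FROM t LIMIT 10')))      # -> 10
    print(getlimit(lexer.tokenize('SELECT * FROM t LIMIT :count')))  # non-numeric, returned as-is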
def get_create_table_info(stream):
    """
    Function that returns the columns of a CREATE TABLE statement
    including their type and NULL declaration.

    The nullable declaration is None if not specified, else 'NOT NULL'
    or 'NULL'.

    >>> import lexer
    >>> get_create_table_info(lexer.tokenize('CREATE TABLE t ( a INT NOT NULL )'))
    [('t', {0: ('a', 'INT', 'NOT NULL')})]
    """
    pipe = Pipeline()
    pipe.append(InfoCreateTable())
    return pipe(stream)
def Compact(sql, includePath="sql"): """Function that return a compacted version of the input SQL query""" pipe = Pipeline() pipe.append(tokenize) pipe.append(IncludeStatement(includePath)) pipe.append(StripComments()) # pipe.append(StripWhitespaceFilter()) # pipe.append(SerializerUnicode()) return pipe(sql)
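# Usage sketch (hypothetical demo helper and file name): IncludeStatement is
# assumed to resolve INCLUDE "..." directives against `includePath`, after
# which StripComments drops the comment lines. "create_user.sql" must exist
# under the given path for this to run.
def _demo_Compact():
    sql = '''
        -- create a user and return the new row id
        INCLUDE "create_user.sql";
        SELECT last_insert_rowid();
    '''
    print(Compact(sql, includePath="queries"))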
def parse_string(self, sql, method_name, dir_path='sql', bypass_types=False,
                 lazy=False):
    """
    Build a function from a string containing a SQL query

    Also add the function as a method to the AntiORM class

    :param sql: the SQL code of the method to be parsed
    :type sql: string
    :param method_name: the name of the method
    :type method_name: string
    :param dir_path: path to the dir with the SQL files (for INCLUDE)
    :type dir_path: string
    :param bypass_types: set if parsing should bypass types
    :type bypass_types: boolean
    :param lazy: set if parsing should be postponed until required
    :type lazy: boolean

    :return: the parsed function or None if `lazy` is True
    :rtype: function or None
    """
    # Lazy processing: store the data and only do the parse later if required
    if lazy:
        self._lazy[method_name] = (self.parse_string, sql, dir_path,
                                   bypass_types)
        return

    # Disable by-pass of types if not using CPython-compatible bytecode
    if bypass_types and not _getframe:
        warn(RuntimeWarning("Can't access the stack. "
                            "Disabling by-pass of types."))
        bypass_types = False

    # Set the dirpaths where to look for the INCLUDE statements
    dirpaths = self._dirpaths
    if dir_path not in dirpaths:
        dirpaths.append(dir_path)

    pipe = Pipeline()
    pipe.append(tokenize)
    pipe.append(IncludeStatement(dirpaths))

    stream = compact(pipe(sql.strip()))

    # One statement query
    if len(split2(stream)) == 1:
        return self._one_statement(method_name, stream, bypass_types)

    # Multiple statement query
    return self._multiple_statement(method_name, stream, bypass_types)
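# Usage sketch (illustrative only; kept as comments because the AntiORM
# constructor and the calling convention of the generated methods are not
# shown here -- `db_conn` stands for a hypothetical DB-API connection):
#
#     orm = AntiORM(db_conn)
#     orm.parse_string("INSERT INTO users(name) VALUES(:name)", "add_user")
#     orm.add_user(name="alice")  # the parsed query is now an instance method
#
#     # With lazy=True nothing is parsed yet; the entry stored in self._lazy
#     # is meant to be processed the first time the method is required.
#     orm.parse_string("SELECT * FROM users LIMIT 1", "first_user", lazy=True)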
class Test(unittest.TestCase):

    def setUp(self):
        self.pipe = Pipeline()
        self.pipe.append(tokenize)
        self.pipe.append(ColumnsSelect())

    def test_1(self):
        sql = """
            -- type: script
            -- return: integer

            INCLUDE "Direntry.make.sql";

            INSERT INTO directories(inode)
                                VALUES(:inode)
            LIMIT 1"""
        self.assertEqual([], self.pipe(sql))

    def test_2(self):
        sql = """
            SELECT child_entry,asdf AS inode, creation
            FROM links
            WHERE parent_dir == :parent_dir AND name == :name
            LIMIT 1"""
        self.assertEqual([u'child_entry', u'inode', u'creation'],
                         self.pipe(sql))

    def test_3(self):
        sql = """
            SELECT
                0 AS st_dev,
                0 AS st_uid,
                0 AS st_gid,

                dir_entries.type         AS st_mode,
                dir_entries.inode        AS st_ino,
                COUNT(links.child_entry) AS st_nlink,

                :creation                AS st_ctime,
                dir_entries.access       AS st_atime,
                dir_entries.modification AS st_mtime,
--                :creation                                                AS st_ctime,
--                CAST(STRFTIME('%s',dir_entries.access)       AS INTEGER) AS st_atime,
--                CAST(STRFTIME('%s',dir_entries.modification) AS INTEGER) AS st_mtime,

                COALESCE(files.size,0) AS st_size, -- Python-FUSE
                COALESCE(files.size,0) AS size     -- PyFilesystem

            FROM dir_entries
                LEFT JOIN files
                    ON dir_entries.inode == files.inode
                LEFT JOIN links
                    ON dir_entries.inode == links.child_entry

            WHERE dir_entries.inode == :inode

            GROUP BY dir_entries.inode
            LIMIT 1"""
        self.assertEqual([u'st_dev', u'st_uid', u'st_gid', u'st_mode',
                          u'st_ino', u'st_nlink', u'st_ctime', u'st_atime',
                          u'st_mtime', u'st_size', u'size'],
                         self.pipe(sql))