def validate(converter, val):
    if val is not None:
        if isinstance(val, str): pass
        elif isinstance(val, unicode): val = val.encode(converter.encoding)
        else: throw(TypeError, 'Value type for attribute %s must be str in encoding %r. Got: %r'
                               % (converter.attr, converter.encoding, type(val)))
    return BasestringConverter.validate(converter, val)

def validate(converter, val):
    if isinstance(val, timedelta): pass
    elif isinstance(val, basestring): val = str2timedelta(val)
    else: throw(TypeError, "Attribute %r: expected type is 'timedelta'. Got: %r" % (converter.attr, val))
    mcs = converter.round_microseconds_to_precision(val.microseconds, converter.precision)
    if mcs is not None: val = timedelta(val.days, val.seconds, mcs)
    return val

def validate(converter, val):
    if isinstance(val, datetime): pass
    elif isinstance(val, basestring): val = str2datetime(val)
    else: throw(TypeError, "Attribute %r: expected type is 'datetime'. Got: %r" % (converter.attr, val))
    mcs = converter.round_microseconds_to_precision(val.microsecond, converter.precision)
    if mcs is not None: val = val.replace(microsecond=mcs)
    return val

def _get_create_sql(index, inside_table):
    schema = index.schema
    case = schema.case
    quote_name = schema.provider.quote_name
    cmd = []
    append = cmd.append
    if not inside_table:
        if index.is_pk: throw(DBSchemaError,
            'Primary key index cannot be defined outside of table definition')
        append(case('CREATE'))
        if index.is_unique: append(case('UNIQUE'))
        append(case('INDEX'))
        # if schema.provider.index_if_not_exists_syntax:
        #     append(case('IF NOT EXISTS'))
        append(quote_name(index.name))
        append(case('ON'))
        append(quote_name(index.table.name))
    else:
        if index.name:
            append(case('CONSTRAINT'))
            append(quote_name(index.name))
        if index.is_pk: append(case('PRIMARY KEY'))
        elif index.is_unique: append(case('UNIQUE'))
        else: append(case('INDEX'))
    append(schema.column_list(index.columns))
    return ' '.join(cmd)

def decompile(decompiler):
    code = decompiler.code
    co_code = code.co_code
    free = code.co_cellvars + code.co_freevars
    try:
        while decompiler.pos < decompiler.end:
            i = decompiler.pos
            if i in decompiler.targets: decompiler.process_target(i)
            op = ord(code.co_code[i])
            i += 1
            if op >= HAVE_ARGUMENT:
                oparg = ord(co_code[i]) + ord(co_code[i+1]) * 256
                i += 2
                if op == EXTENDED_ARG:
                    op = ord(code.co_code[i])
                    i += 1
                    oparg = ord(co_code[i]) + ord(co_code[i+1]) * 256 + oparg * 65536
                    i += 2
                if op in hasconst: arg = [code.co_consts[oparg]]
                elif op in hasname: arg = [code.co_names[oparg]]
                elif op in hasjrel: arg = [i + oparg]
                elif op in haslocal: arg = [code.co_varnames[oparg]]
                elif op in hascompare: arg = [cmp_op[oparg]]
                elif op in hasfree: arg = [free[oparg]]
                else: arg = [oparg]
            else:
                arg = []
            opname = opnames[op].replace('+', '_')
            # print(opname, arg, decompiler.stack)
            method = getattr(decompiler, opname, None)
            if method is None: throw(NotImplementedError('Unsupported operation: %s' % opname))
            decompiler.pos = i
            x = method(*arg)
            if x is not None: decompiler.stack.append(x)
    except AstGenerated:
        pass

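# Sketch of the two-byte argument decoding used above (pre-3.6 bytecode layout,
# plain arithmetic only, not part of the decompiler itself): the low byte comes
# first, and a preceding EXTENDED_ARG contributes the next 16 bits.
_low, _high = 0x34, 0x12
_oparg = _low + _high * 256
assert _oparg == 0x1234
assert 0x0001 * 65536 + _oparg == 0x11234  # effect of EXTENDED_ARG with argument 1
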
def __init__(converter, py_type, attr=None):
    converter.py_type = py_type
    converter.attr = attr
    if attr is None: return
    kwargs = attr.kwargs.copy()
    converter.init(kwargs)
    for option in kwargs: throw(TypeError, 'Attribute %s has unknown option %r' % (attr, option))

def validate(converter, val):
    max_len = converter.max_len
    val_len = len(val)
    if max_len and val_len > max_len:
        throw(ValueError, 'Value for attribute %s is too long. Max length is %d, value length is %d'
                          % (converter.attr, max_len, val_len))
    return val

def process_target(decompiler, pos, partial=False):
    if pos is None: limit = None
    elif partial: limit = decompiler.targets.get(pos, None)
    else: limit = decompiler.targets.pop(pos, None)
    top = decompiler.stack.pop()
    while True:
        top = simplify(top)
        if top is limit: break
        if isinstance(top, ast.GenExprFor): break
        top2 = decompiler.stack[-1]
        if isinstance(top2, ast.GenExprFor): break
        if partial and hasattr(top2, 'endpos') and top2.endpos == pos: break
        if isinstance(top2, (ast.And, ast.Or)):
            if top2.__class__ == top.__class__: top2.nodes.extend(top.nodes)
            else: top2.nodes.append(top)
        elif isinstance(top2, ast.IfExp):  # Python 2.5
            top2.else_ = top
            if hasattr(top, 'endpos'):
                top2.endpos = top.endpos
                if decompiler.targets.get(top.endpos) is top: decompiler.targets[top.endpos] = top2
        else: throw(NotImplementedError('Expression is too complex to decompile, '
                        'try to pass query as string, e.g. select("x for x in Something")'))
        top2.endpos = max(top2.endpos, getattr(top, 'endpos', 0))
        top = decompiler.stack.pop()
    decompiler.stack.append(top)

def __init__(builder, provider, ast):
    builder.provider = provider
    builder.quote_name = provider.quote_name
    builder.paramstyle = paramstyle = provider.paramstyle
    builder.ast = ast
    builder.indent = 0
    builder.keys = {}
    builder.inner_join_syntax = options.INNER_JOIN_SYNTAX
    builder.result = flat(builder(ast))
    builder.sql = u''.join(map(unicode, builder.result)).rstrip('\n')
    if paramstyle in ('qmark', 'format'):
        params = tuple(x for x in builder.result if isinstance(x, Param))
        def adapter(values):
            return tuple(convert(values, params))
    elif paramstyle == 'numeric':
        params = tuple(param for param in sorted(builder.keys.itervalues(), key=attrgetter('id')))
        def adapter(values):
            return tuple(convert(values, params))
    elif paramstyle in ('named', 'pyformat'):
        params = tuple(param for param in sorted(builder.keys.itervalues(), key=attrgetter('id')))
        def adapter(values):
            return dict(('p%d' % param.id, value)
                        for param, value in zip(params, convert(values, params)))
    else: throw(NotImplementedError, paramstyle)
    builder.params = params
    builder.layout = tuple(param.key for param in params)
    builder.adapter = adapter

def get_normalized_type_of(value):
    t = type(value)
    if t is tuple: return tuple(get_normalized_type_of(item) for item in value)
    try: hash(value)  # without this, cannot do tests like 'if value in special_functions...'
    except TypeError: throw(TypeError, "Unsupported type %r" % t.__name__)
    if t.__name__ == "EntityMeta": return SetType(value)
    if t.__name__ == "EntityIter": return SetType(value.entity)
    if PY2 and isinstance(value, str):
        try: value.decode("ascii")
        except UnicodeDecodeError: raise
        else: return unicode
    elif isinstance(value, unicode): return unicode
    if t in function_types: return FuncType(value)
    if t is types.MethodType: return MethodType(value)
    return normalize_type(t)

def validate(converter, val):
    if isinstance(val, float):
        s = str(val)
        if float(s) != val: s = repr(val)
        val = Decimal(s)
    try: val = Decimal(val)
    except InvalidOperation, exc:
        throw(TypeError, 'Invalid value for attribute %s: %r' % (converter.attr, val))
    return val

def __unicode__(param):
    paramstyle = param.style
    if paramstyle == 'qmark': return u'?'
    elif paramstyle == 'format': return u'%s'
    elif paramstyle == 'numeric': return u':%d' % param.id
    elif paramstyle == 'named': return u':p%d' % param.id
    elif paramstyle == 'pyformat': return u'%%(p%d)s' % param.id
    else: throw(NotImplementedError)

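# Placeholder forms produced above for each DBAPI paramstyle (PEP 249);
# param_id stands in for param.id and is used only for this illustration:
param_id = 7
assert u':%d' % param_id == u':7'           # numeric
assert u':p%d' % param_id == u':p7'         # named
assert u'%%(p%d)s' % param_id == u'%(p7)s'  # pyformat
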
def fk_exists(provider, connection, table_name, fk_name):
    owner_name, table_name = provider.split_table_name(table_name)
    if not isinstance(fk_name, basestring): throw(NotImplementedError)
    cursor = connection.cursor()
    cursor.execute("SELECT 1 FROM user_constraints WHERE constraint_type = 'R' "
                   'AND table_name = :tn AND constraint_name = :cn AND owner = :o',
                   dict(tn=table_name, cn=fk_name, o=owner_name))
    return cursor.fetchone() is not None

def index_exists(provider, connection, table_name, index_name):
    owner_name, table_name = provider.split_table_name(table_name)
    if not isinstance(index_name, basestring): throw(NotImplementedError)
    cursor = connection.cursor()
    cursor.execute('SELECT 1 FROM all_indexes WHERE owner = :o '
                   'AND index_name = :i AND table_owner = :o AND table_name = :t',
                   dict(o=owner_name, i=index_name, t=table_name))
    return cursor.fetchone() is not None

def _put_object(bag, obj):
    entity = obj.__class__
    if bag.database.entities.get(entity.__name__) is not entity:
        throw(TypeError, 'Entity %s does not belong to database %r' % (entity.__name__, bag.database))
    cache = bag.session_cache
    if cache is None: cache = bag.session_cache = obj._session_cache_
    elif obj._session_cache_ is not cache:
        throw(TransactionError, 'An attempt to mix objects belonging to different transactions')
    bag.objects[entity].add(obj)

def connect(pool):
    con = pool.con
    if con is not None: return con
    filename = pool.filename
    if not pool.create_db and not os.path.exists(filename):
        throw(IOError, "Database file is not found: %r" % filename)
    pool.con = con = sqlite.connect(filename)
    _init_connection(con)
    return con

def __init__(constraint, name, schema):
    if name is not None:
        assert name not in schema.names
        if name in schema.constraints:
            throw(DBSchemaError, "Constraint with name %r already exists" % name)
        schema.names[name] = constraint
        schema.constraints[name] = constraint
    constraint.schema = schema
    constraint.name = name

def put(bag, x):
    if isinstance(x, Entity): bag._put_object(x)
    else:
        try: x = list(x)
        except: throw(TypeError,
            'Entity instance or a sequence of instances expected. Got: %r' % x)
        for item in x:
            if not isinstance(item, Entity): throw(TypeError,
                'Entity instance or a sequence of instances expected. Got: %r' % item)
            bag._put_object(item)

def validate(self, val, *args, **kwargs):
    val = super().validate(val, *args, **kwargs)
    if val not in self.__choices.values():
        throw(
            ValueError,
            'Choice {} is not valid. Valid choices are {}.'.format(
                val, self.__choices.values(),
            )
        )
    return val

def split_table_name(provider, table_name):
    if isinstance(table_name, basestring): return provider.default_schema_name, table_name
    if not table_name: throw(TypeError, 'Invalid table name: %r' % table_name)
    if len(table_name) != 2:
        size = len(table_name)
        throw(TypeError, '%s qualified table name must have two components: '
                         '%s and table_name. Got %d component%s: %s'
                         % (provider.dialect, provider.name_before_table,
                            size, 's' if size != 1 else '', table_name))
    return table_name[0], table_name[1]

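# Usage sketch for split_table_name above, assuming the Python 2 environment this
# module targets; _FakeProvider is a hypothetical stand-in with just the attributes
# the function reads. A plain string keeps the default schema, a two-item sequence
# is unpacked as (schema, table).
class _FakeProvider(object):
    default_schema_name = 'public'
    dialect = 'SQL'
    name_before_table = 'schema_name'

assert split_table_name(_FakeProvider(), 'orders') == ('public', 'orders')
assert split_table_name(_FakeProvider(), ('sales', 'orders')) == ('sales', 'orders')
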
def init(converter, kwargs):
    Converter.init(converter, kwargs)
    min_val = kwargs.pop('min', None)
    if min_val is not None and not isinstance(min_val, (int, long)):
        throw(TypeError, "'min' argument for attribute %s must be int. Got: %r" % (converter.attr, min_val))
    max_val = kwargs.pop('max', None)
    if max_val is not None and not isinstance(max_val, (int, long)):
        throw(TypeError, "'max' argument for attribute %s must be int. Got: %r" % (converter.attr, max_val))
    converter.min_val = min_val
    converter.max_val = max_val

def validate(converter, val):
    if PY2 and isinstance(val, str): val = val.decode('ascii')
    elif not isinstance(val, unicode): throw(TypeError,
        'Value type for attribute %s must be %s. Got: %r' % (converter.attr, unicode.__name__, type(val)))
    max_len = converter.max_len
    val_len = len(val)
    if max_len and val_len > max_len:
        throw(ValueError, 'Value for attribute %s is too long. Max length is %d, value length is %d'
                          % (converter.attr, max_len, val_len))
    return val

def sql2py(converter, val):
    if isinstance(val, timedelta):
        total_seconds = val.days * (24 * 60 * 60) + val.seconds
        if 0 <= total_seconds <= 24 * 60 * 60:
            minutes, seconds = divmod(total_seconds, 60)
            hours, minutes = divmod(minutes, 60)
            return time(hours, minutes, seconds, val.microseconds)
    elif not isinstance(val, time):
        throw(ValueError, 'Value of unexpected type received from database%s: instead of time or timedelta got %s'
                          % (' for attribute %s' % converter.attr if converter.attr else '', type(val)))
    return val

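# Worked example of the timedelta -> time mapping above (standard library only):
from datetime import timedelta, time
_td = timedelta(hours=13, minutes=45, seconds=30)
_total_seconds = _td.days * (24 * 60 * 60) + _td.seconds   # 49530
_minutes, _seconds = divmod(_total_seconds, 60)
_hours, _minutes = divmod(_minutes, 60)
assert time(_hours, _minutes, _seconds, _td.microseconds) == time(13, 45, 30)
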
def validate(converter, val):
    if isinstance(val, UUID): return val
    if isinstance(val, buffer): return UUID(bytes=val)
    if isinstance(val, basestring):
        if len(val) == 16: return UUID(bytes=val)
        return UUID(hex=val)
    if isinstance(val, int): return UUID(int=val)
    if converter.attr is not None:
        throw(ValueError, 'Value type of attribute %s must be UUID. Got: %r' % (converter.attr, type(val)))
    else: throw(ValueError, 'Expected UUID value, got: %r' % type(val))

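# The converter above accepts several representations of the same UUID;
# the equivalences it relies on come straight from the stdlib uuid module:
from uuid import UUID
_u = UUID('12345678-1234-5678-1234-567812345678')
assert UUID(hex=_u.hex) == _u
assert UUID(bytes=_u.bytes) == _u
assert UUID(int=_u.int) == _u
assert len(_u.bytes) == 16  # the length check used to tell raw bytes from hex strings
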
def index_exists(provider, connection, table_name, index_name, case_sensitive=True):
    owner_name, table_name = provider.split_table_name(table_name)
    if not isinstance(index_name, basestring): throw(NotImplementedError)
    if case_sensitive:
        sql = 'SELECT index_name FROM all_indexes WHERE owner = :o ' \
              'AND index_name = :i AND table_owner = :o AND table_name = :t'
    else:
        sql = 'SELECT index_name FROM all_indexes WHERE owner = :o ' \
              'AND upper(index_name) = upper(:i) AND table_owner = :o AND table_name = :t'
    cursor = connection.cursor()
    cursor.execute(sql, dict(o=owner_name, i=index_name, t=table_name))
    row = cursor.fetchone()
    return row[0] if row is not None else None

def __init__(column, name, table, sql_type, is_not_null=None):
    if name in table.column_dict:
        throw(DBSchemaError, "Column %r already exists in table %r" % (name, table.name))
    table.column_dict[name] = column
    table.column_list.append(column)
    column.table = table
    column.name = name
    column.sql_type = sql_type
    column.is_not_null = is_not_null
    column.is_pk = False
    column.is_pk_part = False
    column.is_unique = False

def validate(converter, val):
    if isinstance(val, datetime): pass
    elif isinstance(val, basestring): val = str2datetime(val)
    else: throw(TypeError, "Attribute %r: expected type is 'datetime'. Got: %r" % (converter.attr, val))
    p = converter.precision
    if not p: val = val.replace(microsecond=0)
    elif p == 6: pass
    else:
        rounding = 10 ** (6 - p)
        microsecond = (val.microsecond // rounding) * rounding
        val = val.replace(microsecond=microsecond)
    return val

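# Illustration of the precision handling above: with p fractional-second digits
# kept, the microsecond value is truncated to a multiple of 10 ** (6 - p).
_p = 3
_rounding = 10 ** (6 - _p)                       # 1000
assert (123456 // _rounding) * _rounding == 123000
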
def fk_exists(provider, connection, table_name, fk_name, case_sensitive=True):
    owner_name, table_name = provider.split_table_name(table_name)
    if not isinstance(fk_name, basestring): throw(NotImplementedError)
    if case_sensitive:
        sql = "SELECT constraint_name FROM user_constraints WHERE constraint_type = 'R' " \
              'AND table_name = :tn AND constraint_name = :cn AND owner = :o'
    else:
        sql = "SELECT constraint_name FROM user_constraints WHERE constraint_type = 'R' " \
              'AND table_name = :tn AND upper(constraint_name) = upper(:cn) AND owner = :o'
    cursor = connection.cursor()
    cursor.execute(sql, dict(tn=table_name, cn=fk_name, o=owner_name))
    row = cursor.fetchone()
    return row[0] if row is not None else None

def to_dict(objects):
    if isinstance(objects, Entity): objects = [objects]
    objects = iter(objects)
    try: first_object = next(objects)
    except StopIteration: return {}
    if not isinstance(first_object, Entity):
        throw(TypeError, 'Entity instance or a sequence of instances expected. Got: %r' % first_object)
    database = first_object._database_
    bag = Bag(database)
    bag.put(first_object)
    bag.put(objects)
    return dict(bag.to_dict())

def init(converter, kwargs):
    attr = converter.attr
    if not attr.args: max_len = None
    elif len(attr.args) > 1: unexpected_args(attr, attr.args[1:])
    else: max_len = attr.args[0]
    if issubclass(attr.py_type, (LongStr, LongUnicode)):
        if max_len is not None: throw(TypeError, 'Max length is not supported for CLOBs')
    elif max_len is None: max_len = 200
    elif not isinstance(max_len, (int, long)):
        throw(TypeError, 'Max length argument must be int. Got: %r' % max_len)
    converter.max_len = max_len
    converter.db_encoding = kwargs.pop('db_encoding', None)

def validate(converter, val, obj=None):
    if isinstance(val, buffer): return val
    if isinstance(val, str): return buffer(val)
    throw(TypeError, "Attribute %r: expected type is 'buffer'. Got: %r" % (converter.attr, type(val)))

def init(converter, kwargs):
    attr = converter.attr
    args = attr.args
    if len(args) > 2: throw(TypeError, 'Too many positional parameters for Decimal '
                                       '(expected: precision and scale), got: %s' % args)
    if args: precision = args[0]
    else: precision = kwargs.pop('precision', 12)
    if not isinstance(precision, int_types): throw(TypeError,
        "'precision' positional argument for attribute %s must be int. Got: %r" % (attr, precision))
    if precision <= 0: throw(TypeError,
        "'precision' positional argument for attribute %s must be positive. Got: %r" % (attr, precision))
    if len(args) == 2: scale = args[1]
    else: scale = kwargs.pop('scale', 2)
    if not isinstance(scale, int_types): throw(TypeError,
        "'scale' positional argument for attribute %s must be int. Got: %r" % (attr, scale))
    if scale <= 0: throw(TypeError,
        "'scale' positional argument for attribute %s must be positive. Got: %r" % (attr, scale))
    if scale > precision: throw(ValueError, "'scale' must be less or equal 'precision'")
    converter.precision = precision
    converter.scale = scale
    converter.exp = Decimal(10) ** -scale
    min_val = kwargs.pop('min', None)
    if min_val is not None:
        try: min_val = Decimal(min_val)
        except TypeError: throw(TypeError,
            "Invalid value for 'min' argument for attribute %s: %r" % (attr, min_val))
    max_val = kwargs.pop('max', None)
    if max_val is not None:
        try: max_val = Decimal(max_val)
        except TypeError: throw(TypeError,
            "Invalid value for 'max' argument for attribute %s: %r" % (attr, max_val))
    converter.min_val = min_val
    converter.max_val = max_val

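# Worked example of converter.exp computed above: scale=2 gives the quantum
# Decimal('0.01') that values can later be quantized to (stdlib decimal only;
# the sample value is illustrative):
from decimal import Decimal
_scale = 2
_exp = Decimal(10) ** -_scale
assert _exp == Decimal('0.01')
assert Decimal('3.14159').quantize(_exp) == Decimal('3.14')
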
def JSON_VALUE(builder, expr, path, type): throw(NotImplementedError)
def ARRAY_LENGTH(builder, array): throw(NotImplementedError)
def ARRAY_SLICE(builder, array, start, stop): throw(NotImplementedError)
def RETURN_VALUE(decompiler):
    if decompiler.pos != decompiler.end: throw(NotImplementedError)
    expr = decompiler.stack.pop()
    decompiler.stack.append(simplify(expr))
    raise AstGenerated()

def setdefault(kwargs, key, value):
    kwargs_value = kwargs.setdefault(key, value)
    if value is not None and value != kwargs_value:
        throw(ValueError, 'Ambiguous value for ' + key)

def __init__(foreign_key, name, child_table, child_columns, parent_table, parent_columns, index_name):
    schema = parent_table.schema
    if schema is not child_table.schema: throw(DBSchemaError,
        'Parent and child tables of foreign_key cannot belong to different schemata')
    for column in parent_columns:
        if column.table is not parent_table: throw(DBSchemaError,
            'Column %r does not belong to table %r' % (column.name, parent_table.name))
    for column in child_columns:
        if column.table is not child_table: throw(DBSchemaError,
            'Column %r does not belong to table %r' % (column.name, child_table.name))
    if len(parent_columns) != len(child_columns):
        throw(DBSchemaError, 'Foreign key columns count do not match')
    if child_columns in child_table.foreign_keys:
        if len(child_columns) == 1: throw(DBSchemaError,
            'Foreign key for column %r already defined' % child_columns[0].name)
        else: throw(DBSchemaError, 'Foreign key for columns (%s) already defined'
                        % ', '.join(repr(column.name) for column in child_columns))
    if name is not None and name in schema.names:
        throw(DBSchemaError, 'Foreign key %s cannot be created, name is already in use' % name)
    Constraint.__init__(foreign_key, name, schema)
    child_table.foreign_keys[child_columns] = foreign_key
    if child_table is not parent_table:
        child_table.parent_tables.add(parent_table)
        parent_table.child_tables.add(child_table)
    foreign_key.parent_table = parent_table
    foreign_key.parent_columns = parent_columns
    foreign_key.child_table = child_table
    foreign_key.child_columns = child_columns
    if index_name is not False:
        child_columns_len = len(child_columns)
        if all(columns[:child_columns_len] != child_columns for columns in child_table.indexes):
            child_table.add_index(index_name, child_columns, is_pk=False,
                                  is_unique=False, m2m=bool(child_table.m2m))

def __init__(index, name, table, columns, is_pk=False, is_unique=None):
    assert len(columns) > 0
    for column in columns:
        if column.table is not table: throw(DBSchemaError,
            "Column %r does not belong to table %r and cannot be part of its index"
            % (column.name, table.name))
    if columns in table.indexes:
        if len(columns) == 1: throw(DBSchemaError,
            "Index for column %r already exists" % columns[0].name)
        else: throw(DBSchemaError, "Index for columns (%s) already exists"
                        % ', '.join(repr(column.name) for column in columns))
    if is_pk:
        if table.pk_index is not None:
            throw(DBSchemaError, 'Primary key for table %r is already defined' % table.name)
        table.pk_index = index
        if is_unique is None: is_unique = True
        elif not is_unique: throw(DBSchemaError,
            "Incompatible combination of is_unique=False and is_pk=True")
    elif is_unique is None: is_unique = False
    schema = table.schema
    if name is not None and name in schema.names:
        throw(DBSchemaError, 'Index %s cannot be created, name is already in use' % name)
    Constraint.__init__(index, name, schema)
    for column in columns:
        column.is_pk = column.is_pk or (len(columns) == 1 and is_pk)
        column.is_pk_part = column.is_pk_part or bool(is_pk)
        column.is_unique = column.is_unique or (is_unique and len(columns) == 1)
    table.indexes[columns] = index
    index.table = table
    index.columns = columns
    index.is_pk = is_pk
    index.is_unique = is_unique

def JSON_ARRAY_LENGTH(builder, value): throw(NotImplementedError)
def JSON_CONTAINS(builder, expr, path, key): throw(NotImplementedError)
def JSON_CONCAT(builder, left, right): throw(NotImplementedError)
def postAssName(translator, node):
    if node.flags != 'OP_ASSIGN': throw(TypeError)
    name = node.name
    if name.startswith('__'): throw(TranslationError, 'Illegal name: %r' % name)
    translator.contexts[-1].add(name)

def sql2py(converter, val):
    if isinstance(val, datetime): return val.date()
    if not isinstance(val, date): throw(ValueError,
        'Value of unexpected type received from database: instead of date got %s' % type(val))
    return val

def STORE_FAST(decompiler, varname):
    if varname.startswith('_['): throw(InvalidQuery(
        'Use generator expression (... for ... in ...) '
        'instead of list comprehension [... for ... in ...] inside query'))
    decompiler.assnames.add(varname)
    decompiler.store(ast.AssName(varname, 'OP_ASSIGN'))

def ARRAY_SUBSET(builder, array1, not_in, array2): throw(NotImplementedError)
def init(self, kwargs):
    dbapiprovider.IntConverter.init(self, kwargs)
    sequence_name = kwargs.pop('sequence_name', None)
    if sequence_name is not None and not (self.attr.auto and self.attr.is_pk):
        throw(TypeError, "Parameter 'sequence_name' can be used only for PrimaryKey attributes with auto=True")

def LIST_APPEND(decompiler): throw(NotImplementedError)
def MAKE_ARRAY(builder, *items): throw(NotImplementedError)
def NOT_IN(builder, expr1, x):
    if not x: throw(AstError, 'Empty IN clause')
    if len(x) >= 1 and x[0] == 'SELECT':
        return builder(expr1), ' NOT IN ', builder(x)
    expr_list = [builder(expr) for expr in x]
    return builder(expr1), ' NOT IN (', join(', ', expr_list), ')'

def LIST_APPEND(decompiler, offset=None):
    throw(InvalidQuery('Use generator expression (... for ... in ...) '
                       'instead of list comprehension [... for ... in ...] inside query'))

def default_post(translator, node): throw(NotImplementedError, node)
def __init__(converter, provider, py_type, attr=None):
    if attr is not None: throw(TypeError, 'Attribute %s has invalid type NoneType' % attr)
    Converter.__init__(converter, provider, py_type)

def JSON_QUERY(builder, expr, path): throw(NotImplementedError)
def init(converter, kwargs):
    Converter.init(converter, kwargs)
    attr = converter.attr
    min_val = kwargs.pop('min', None)
    if min_val is not None and not isinstance(min_val, int_types):
        throw(TypeError, "'min' argument for attribute %s must be int. Got: %r" % (attr, min_val))
    max_val = kwargs.pop('max', None)
    if max_val is not None and not isinstance(max_val, int_types):
        throw(TypeError, "'max' argument for attribute %s must be int. Got: %r" % (attr, max_val))
    size = kwargs.pop('size', None)
    if size is None:
        if attr.py_type.__name__ == 'long':
            deprecated(9, "Attribute %s: 'long' attribute type is deprecated. "
                          "Please use 'int' type with size=64 option instead" % attr)
            attr.py_type = int
            size = 64
    elif attr.py_type.__name__ == 'long':
        throw(TypeError, "Attribute %s: 'size' option cannot be used with long type. "
                         "Please use int type instead" % attr)
    elif not isinstance(size, int_types):
        throw(TypeError, "'size' option for attribute %s must be of int type. Got: %r" % (attr, size))
    elif size not in (8, 16, 24, 32, 64):
        throw(TypeError, "incorrect value of 'size' option for attribute %s. "
                         "Should be 8, 16, 24, 32 or 64. Got: %d" % (attr, size))
    unsigned = kwargs.pop('unsigned', False)
    if unsigned is not None and not isinstance(unsigned, bool):
        throw(TypeError, "'unsigned' option for attribute %s must be of bool type. Got: %r" % (attr, unsigned))
    if size == 64 and unsigned and not converter.provider.uint64_support: throw(TypeError,
        'Attribute %s: %s provider does not support unsigned bigint type' % (attr, converter.provider.dialect))
    if unsigned is not None and size is None: size = 32
    lowest = highest = None
    if size:
        highest = 2 ** size - 1 if unsigned else 2 ** (size - 1) - 1
        lowest = 0 if unsigned else -(2 ** (size - 1))
    if highest is not None and max_val is not None and max_val > highest:
        throw(ValueError, "'max' argument should be less or equal to %d because of size=%d and unsigned=%s. "
                          "Got: %d" % (highest, size, max_val, unsigned))
    if lowest is not None and min_val is not None and min_val < lowest:
        throw(ValueError, "'min' argument should be greater or equal to %d because of size=%d and unsigned=%s. "
                          "Got: %d" % (lowest, size, min_val, unsigned))
    converter.min_val = min_val or lowest
    converter.max_val = max_val or highest
    converter.size = size
    converter.unsigned = unsigned

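# The derived bounds above, spelled out as plain arithmetic for two common cases;
# _int_bounds is a hypothetical helper used only for this illustration:
def _int_bounds(size, unsigned=False):
    highest = 2 ** size - 1 if unsigned else 2 ** (size - 1) - 1
    lowest = 0 if unsigned else -(2 ** (size - 1))
    return lowest, highest

assert _int_bounds(8, unsigned=True) == (0, 255)
assert _int_bounds(32) == (-2147483648, 2147483647)
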
def RETURN_VALUE(decompiler):
    if decompiler.next_pos != decompiler.end: throw(DecompileError)
    expr = decompiler.stack.pop()
    return simplify(expr)

def ARRAY_CONTAINS(builder, key, not_in, col): throw(NotImplementedError)
def fk_exists(provider, connection, table_name, fk_name, case_sensitive=True): throw(NotImplementedError)
def JSON_NONZERO(builder, expr): throw(NotImplementedError)
def JSON_ARRAY_LENGTH(builder, value): throw(TranslationError, 'Oracle does not provide `length` function for JSON arrays')