Example 1
    def __init__(self, local_key, remote_key, many, on_remote):
        assert type(local_key) is tuple and type(remote_key) is tuple

        self.local_key = local_key
        self.remote_key = remote_key

        self.local_cls = getattr(self.local_key[0], "cls", None)
        self.remote_cls = self.remote_key[0].cls
        self.remote_key_is_primary = False

        primary_key = get_cls_info(self.remote_cls).primary_key
        if len(primary_key) == len(self.remote_key):
            for column1, column2 in zip(self.remote_key, primary_key):
                if column1.name != column2.name:
                    break
            else:
                self.remote_key_is_primary = True

        self.many = many
        self.on_remote = on_remote

        # XXX These should probably be weak dictionaries.
        self._local_columns = {}
        self._remote_columns = {}

        self._l_to_r = {}
        self._r_to_l = {}
Example 2
    def event_key(self):
        """See `ILongPollEvent`.

        Constructs the key from the table name of the Storm class.
        """
        cls_info = get_cls_info(self.source)
        return generate_event_key(cls_info.table.name.lower())
Example 3
        def run_test():
            yield tables.runCreateTable(Node)

            count = tables.count(Node)
            self.assertEqual(count, 0)

            store = config.main.zstorm.get('main_store')
            nodetest = Node()
            nodetest.name = u"test"
            nodetest.description = u"test"
            nodetest.hidden_service = u"test"
            nodetest.public_site = u"test"
            nodetest.email = u"*****@*****.**"
            nodetest.private_stats_update_time = 30 # minutes
            nodetest.public_stats_update_time = 120 # minutes
            nodetest.languages = [{"code": "it", "name": "Italiano"},
                                  {"code": "en", "name": "English"}]
            store.add(nodetest)

            count = tables.count(Node)
            self.assertEqual(count, 1)

            # select & verify
            node = store.find(Node, 1 == Node.id).one()
            cls_info = get_cls_info(Node)
            for name in cls_info.attributes.iterkeys():
                self.assertEqual(getattr(node, name, ""), getattr(nodetest, name, ""))
Example 4
def determine_table_and_fragment(table, ftq):
    table = get_cls_info(table).table
    if ftq:
        query_fragment = "ftq(?)"
    else:
        query_fragment = "?::tsquery"
    return table, query_fragment
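
A hedged usage sketch for determine_table_and_fragment(): the caller is assumed to splice the table name and fragment into a PostgreSQL text-search condition and bind the user's search terms to the placeholder. BugTask and its "fti" tsvector column are hypothetical, not taken from the snippet above.

    # Hypothetical caller; BugTask and the "fti" column are assumptions.
    table, query_fragment = determine_table_and_fragment(BugTask, ftq=True)
    condition = "%s.fti @@ %s" % (table.name, query_fragment)
    # condition now reads something like 'BugTask.fti @@ ftq(?)'; the search
    # terms are supplied separately as the parameter bound to the "?".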
Example 5
    def _load_object(self, cls_info, result, values):
        """Create an object from values loaded from the database.

        @param cls_info: The C{ClassInfo} for the row being loaded.
        @param result: The database result set.
        @param values: The database values.
        @return: A new instance of the class mapped to the table being
            loaded.
        """
        if not any(values):
            # We've got a row full of NULLs, so consider that the object
            # wasn't found.  This is useful for joins, where non-existent rows
            # are represented like that.
            return None

        # Build a new instance.  We need the cls_info columns for the class of
        # the actual object, not from a possible wrapper (e.g. an alias).
        cls = cls_info.cls
        cls_info = get_cls_info(cls)
        index = {}
        for attributeName, propertyColumn in cls_info.attributes.iteritems():
            index[propertyColumn.name] = attributeName

        # Build a new instance and populate it with values from the database.
        obj = cls.__new__(cls)
        for column, value in zip(cls_info.columns, values):
            variable = column.variable_factory(value=value, from_db=True)
            attributeName = index[column.name]
            setattr(obj, attributeName, variable.get())
        return obj
Example 6
 def __getattr__(self, attr):
     if attr.startswith('__'):
         raise AttributeError(attr)
     elif attr == 'id':
         cls_info = get_cls_info(self._cls)
         return cls_info.primary_key[0]
     else:
         return getattr(self._cls, attr)
Example 7
    def get(self, cls, key, **options):
        """Get object of type cls with the given primary key from the database.

        If the object is alive the database won't be touched.

        @param cls: Class of the object to be retrieved.
        @param key: Primary key of object. May be a tuple for composed keys.

        @return: The object found with the given primary key, or None
            if no object is found.
        """
        for_update_nowait = options.get("for_update_nowait")

        if for_update_nowait is not True:
            for_update_nowait = False

        if self._implicit_flush_block_count == 0:
            self.flush()

        if type(key) != tuple:
            key = (key, )

        cls_info = get_cls_info(cls)

        assert len(key) == len(cls_info.primary_key)

        primary_vars = []
        for column, variable in zip(cls_info.primary_key, key):
            if not isinstance(variable, Variable):
                variable = column.variable_factory(value=variable)
            primary_vars.append(variable)

        primary_values = tuple(var.get(to_db=True) for var in primary_vars)
        obj_info = self._alive.get((cls_info.cls, primary_values))
        if obj_info is not None and not obj_info.get("invalidated"):
            return self._get_object(obj_info)

        where = compare_columns(cls_info.primary_key, primary_vars)

        select = Select(cls_info.columns,
                        where,
                        default_tables=cls_info.table,
                        limit=1,
                        for_update_nowait=for_update_nowait)
        try:
            result = self._connection.execute(select)
        except Exception as db_error:
            if db_error[0].code == RESOURCE_BUSY_ERROR_CODE:  # Resource busy
                raise DatabaseResourceBusyException(
                    _(RESOURCE_BUSY_ERROR_MESSAGE))

            raise db_error

        values = result.get_one()
        if values is None:
            return None
        return self._load_object(cls_info, result, values)
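
A hedged usage sketch for the get() method above; Person and LinkedItem are hypothetical classes, and store is an instance of the Store subclass that defines this method.

    # Single-column primary key: hits the database only if the object is not
    # already alive (and valid) in the store's cache.
    person = store.get(Person, 42)

    # Composite primary key: pass the key as a tuple, in primary-key order.
    link = store.get(LinkedItem, (42, 7))

    # Row-locking variant added by this subclass; raises
    # DatabaseResourceBusyException if the row is locked by another session.
    person = store.get(Person, 42, for_update_nowait=True)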
Example 8
    def get_foreign_columns(self):
        info = get_cls_info(self.orm_type)
        for name, attr in info.attributes.items():
            if not name.endswith('ID'):
                continue

            name = name[:-2]
            ref = getattr(self.orm_type, name)
            other_class = ref._remote_key.split('.')[0]
            yield name, other_class
Example 9
 def clear(self, *args, **kwargs):
     store = Store.of(self._local)
     if store is None:
         raise NoStoreError("Can't perform operation without a store")
     where = self._relation1.get_where_for_remote(self._local)
     if args or kwargs:
         filter = get_where_for_args(args, kwargs, self._target_cls)
         join = self._relation2.get_where_for_join()
         table = get_cls_info(self._target_cls).table
         where &= Exists(Select(SQLRaw("*"), join & filter, tables=table))
     store.find(self._link_cls, where).remove()
Example 10
    def set_from_template(self, template):
        if not template:
            return

        for column in get_cls_info(template.__class__).columns:
            if column.name in ['product_tax_template_id', 'te_id', 'id']:
                continue

            value = getattr(template, column.name)
            setattr(self, column.name, value)

        self.set_initial_values()
Example 11
def gen_reload_queries(objects):
    """Prepare queries to reload the given objects."""
    for object_type, objects in collate(objects, get_type):
        primary_key = get_cls_info(object_type).primary_key
        if len(primary_key) != 1:
            raise AssertionError(
                "Compound primary keys are not supported: %s." %
                object_type.__name__)
        primary_key_column = primary_key[0]
        primary_key_column_getter = primary_key_column.__get__
        for store, objects in collate(objects, Store.of):
            primary_keys = map(primary_key_column_getter, objects)
            condition = primary_key_column.is_in(primary_keys)
            yield store.find(object_type, condition)
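
A short usage sketch, assuming a list of already-loaded objects (people is hypothetical): each yielded result set covers one (class, store) batch, and iterating it is what actually re-reads the rows from the database.

    for result in gen_reload_queries(people):
        list(result)  # materializing the find() is what forces the reload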
Example 12
 def add_class(self, cls):
     """Register properties of C{cls} so that they may be found by C{get()}.
     """
     suffix = cls.__module__.split(".")
     suffix.append(cls.__name__)
     suffix.reverse()
     suffix = ".%s." % ".".join(suffix)
     cls_info = get_cls_info(cls)
     for attr in cls_info.attributes:
         prop = cls_info.attributes[attr]
         prop_ref = weakref.KeyedRef(prop, self._remove, None)
         pair = (attr + suffix, prop_ref)
         prop_ref.key = pair
         insort_left(self._properties, pair)
Example 13
def _primary_key(object_type, allow_compound=False):
    """Get a primary key our helpers can use.

    :raises AssertionError: if the key is missing or unusable.
    """
    primary_key = get_cls_info(object_type).primary_key
    if len(primary_key) == 1:
        return primary_key[0]
    else:
        if not allow_compound:
            raise AssertionError(
                "Compound primary keys are not supported: %s." %
                object_type.__name__)
        return primary_key
Example 14
def create(columns, values, get_objects=False, get_primary_keys=False):
    """Create a large number of objects efficiently.

    :param columns: The Storm columns to insert values into. Must be from a
        single class.
    :param values: A list of lists of values for the columns.
    :param get_objects: Return the created objects.
    :param get_primary_keys: Return the created primary keys.
    :return: A list of the created objects if get_objects is True, a list of
        their primary keys if get_primary_keys is True, otherwise None.
    """
    # Flatten Reference faux-columns into their primary keys.
    db_cols = list(chain.from_iterable(map(dbify_column, columns)))
    clses = set(col.cls for col in db_cols)
    if len(clses) != 1:
        raise ValueError(
            "The Storm columns to insert values into must be from a single "
            "class.")
    if get_objects and get_primary_keys:
        raise ValueError(
            "get_objects and get_primary_keys are mutually exclusive.")

    if len(values) == 0:
        return [] if (get_objects or get_primary_keys) else None

    [cls] = clses
    primary_key = get_cls_info(cls).primary_key

    # Mangle our value list into compilable values. Normal columns just
    # get passed through the variable factory, while References get
    # squashed into primary key variables.
    db_values = [
        list(
            chain.from_iterable(
                dbify_value(col, val) for col, val in zip(columns, value)))
        for value in values
    ]

    if get_objects or get_primary_keys:
        result = IStore(cls).execute(
            Returning(
                Insert(db_cols, values=db_values,
                       primary_columns=primary_key)))
        keys = map(itemgetter(0), result) if len(primary_key) == 1 else result
        if get_objects:
            return load(cls, keys)
        else:
            return list(keys)
    else:
        IStore(cls).execute(Insert(db_cols, values=db_values))
        return None
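
A hedged usage sketch for create(); Person, a_team and b_team are hypothetical. A Reference such as Person.team can be passed as a column because dbify_column()/dbify_value() flatten it into its primary-key parts.

    people = create(
        (Person.name, Person.team),
        [(u"Alice", a_team), (u"Bob", b_team)],
        get_objects=True)

    # Or ask only for the generated primary keys:
    keys = create((Person.name,), [(u"Carol",)], get_primary_keys=True)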
Example 15
    def set_item_tax(self, invoice_item, template=None):
        """ Set the tax of an invoice item.

        :param invoice_item: the item of an incoming or outgoing invoice
        """
        template = template or self.get_tax_template(invoice_item)
        if not template:
            return

        for column in get_cls_info(template.__class__).columns:
            if column.name in ['product_tax_template_id', 'te_id', 'id']:
                continue

            value = getattr(template, column.name)
            setattr(self, column.name, value)

        self.set_initial_values(invoice_item)
Example 16
    def clone(self):
        """Get a persistent copy of an existent object. Remember that we can
        not use copy because this approach will not activate ORMObject
        methods which allow creating persitent objects. We also always
        need a new id for each copied object.

        :returns: the copy of ourselves
        """
        warnings.warn("don't use this", DeprecationWarning, stacklevel=2)
        kwargs = {}
        for column in get_cls_info(self.__class__).columns:
            # FIXME: Make sure this is cloning correctly
            name = column.name
            if name in ['id', 'identifier', 'te_id']:
                continue
            if name.endswith('_id'):
                name = name[:-3]
            kwargs[name] = getattr(self, name)

        klass = type(self)
        return klass(store=self.store, **kwargs)
Example 17
    def save(self, row, column_vals):
        from storm.info import get_cls_info
        from storm.locals import ReferenceSet, Store

        cls_info = get_cls_info(self.medium)

        column_vals = list(column_vals)
        pk = []
        for n, v in column_vals:
            propid = id(getattr(self.medium, n))
            if propid in cls_info.primary_key_idx:
                pk.append((cls_info.primary_key_idx[propid], v, n))

        assert len(pk) == 0 or len(pk) == len(cls_info.primary_key), (
            "Incomplete primary key see %s need %s" %
            ([x[2] for x in pk], [x.name for x in cls_info.primary_key]))

        if pk:
            obj = self.transaction.get(self.medium,
                                       tuple([x[1] for x in sorted(pk)]))
        else:
            obj = None

        if obj is None:
            obj = self.medium()
            self.transaction.add(obj)

        assert Store.of(obj) is self.transaction

        for n, v in column_vals:
            if isinstance(getattr(self.medium, n), ReferenceSet):
                getattr(obj, n).add(v)
            else:
                setattr(obj, n, v)

        self.transaction.flush()
        stlog.info("%s %s", obj, [(n, getattr(obj, n)) for n in row.columns()])

        return obj
Example 18
    def _dump_model(self, ns, model):
        if model is None:
            self.output += 'model: None\n'
            return
        self.output += 'model: %s\n' % (ns[model], )
        info = get_cls_info(type(model))
        for col in info.columns:
            if col.name == 'id' or col.name == 'identifier':
                continue
            if col.name.endswith('_id'):
                value = getattr(model, col.name[:-3], None)
                if value in ns:
                    self.output += '  %s: %s\n' % (col.name, ns[value])
                continue

            value = getattr(model, col.name, None)
            if isinstance(value, datetime.datetime):
                # Strip hours/minutes/seconds so today() works
                value = datetime.datetime(value.year, value.month, value.day)

            self.output += '  %s: %r\n' % (col.name, value)
        self.output += '\n'
Example 19
def encode_storm_object(object):
    '''Serialize a Storm object to JSON.

    Use:
        from storm.info import get_cls_info
        import json
        ...
        storm_object = get_storm_object()
        print json.dumps(storm_object, default=encode_storm_object)

    Warnings:
        Only objects containing Int, Date and Unicode data types have been
        tested; other data types are untested and this MUST be improved.
    '''
    if not hasattr(object, "__storm_table__"):
        raise TypeError(repr(object) + " is not JSON serializable")
    result = {}
    cls_info = get_cls_info(object.__class__)
    for name in cls_info.attributes.iterkeys():
        value = getattr(object, name)
        if isinstance(value, date):
            value = str(value)
        result[name] = value
    return result
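
A self-contained sketch of the json.dumps() usage described in the docstring above; Person is a hypothetical Storm class, and json falls back to encode_storm_object for any value it cannot serialize natively.

    import json
    from datetime import date
    from storm.locals import Storm, Int, Unicode, Date

    class Person(Storm):
        __storm_table__ = "person"
        id = Int(primary=True)
        name = Unicode()
        birthday = Date()

    person = Person()
    person.id = 1
    person.name = u"Alice"
    person.birthday = date(1990, 1, 2)

    # Prints something like {"id": 1, "name": "Alice", "birthday": "1990-01-02"}
    print json.dumps(person, default=encode_storm_object)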
Example 20
def compile_type(compile, expr, state):
    cls_info = get_cls_info(expr)
    table = compile(cls_info.table, state)
    if state.context is TABLE and issubclass(expr, ClassAlias):
        return "%s %s" % (compile(cls_info.cls, state), table)
    return table
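
A small sketch of what the compile handler above produces, assuming Storm's global compile and a hypothetical Person class: the class itself compiles to its table name, while a ClassAlias picks up the extra aliased form when it appears in a FROM clause.

    from storm.expr import Select, compile
    from storm.info import ClassAlias
    from storm.locals import Storm, Int, Unicode

    class Person(Storm):
        __storm_table__ = "person"
        id = Int(primary=True)
        name = Unicode()

    PersonAlias = ClassAlias(Person, "p")

    print compile(Select(Person.id))       # FROM clause uses the plain table name
    print compile(Select(PersonAlias.id))  # FROM clause uses the aliased form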
Example 21
 def __init__(self, db, cls):
     '''Construct new TableSchema
     
     @param db: reference to database object (e.g. as returned by create_database)
     @param cls: db object class
     
     NOTE: uses db class name to determine database system (SQLite, MySQL, etc.)'''
     self.cls = cls
     self.dbClass = db.__class__.__name__
     
     colTypeTable = _columnTypes[self.dbClass]
     
     self.cls_info = get_cls_info(cls)
     
     self.columns = []
     self.constraints = []
     self.unique = []
     
     self.tableName = self.cls_info.table.name
     
     for var, col in cls._storm_columns.items():
         # Get column type from table
         
         # XXX: This hack relies on VariableFactory implementation that uses
         # functools.partial (see storm/variables.py). Don't know, how to do
         # it better because we lose any information about the variable class
         # after get_cls_info()
         colType = colTypeTable[col.variable_factory.func]    
         if callable(colType):
             colType = colType(col)
         
         colSpec = [col.name, colType]
         
         # Add options (NOT NULL, PRIMARY KEY + AUTOINCREMENT, DEFAULT, UNIQUE)
         default = getattr(col, 'default', None)
         if default is not None:
             colSpec.append('DEFAULT %s' % repr(default))
             
         uniq = getattr(var, 'unique', False)
         if uniq:
             self.unique.append(col)
             
             if self.dbClass == 'SQLite':
                 colSpec.append('UNIQUE')
             else:
                 constraint = ConstraintImpl(_uniqueConstraint[self.dbClass],
                                             col.name, [col])
                 self.constraints.append(constraint)
         
         # "col == pk" builds an Eq SQL expression, and bool(Eq) is always
         # True, so a plain "col in primary_key" test would also always be
         # True; compare identities instead.
         if any(pk is col 
                for pk 
                in self.cls_info.primary_key):
             colSpec.append('PRIMARY KEY')
             colSpec.append(_autoIncrement[self.dbClass])
         
         allowNone = var._variable_kwargs.get('allow_none', False)
         if not allowNone:
             colSpec.append('NOT NULL')
         
         self.columns.append(colSpec)
     
     for conName, con in cls.__dict__.iteritems():
         if isinstance(con, UniqueConstraint):
             constraint = UniqueConstraintImpl(conName,
                                               [prop._get_column(cls) 
                                                for prop 
                                                in con.columns])
             self.constraints.append(constraint)
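
A minimal sketch of the functools.partial detail that the XXX comment above relies on, using a hypothetical Person class: each column's variable_factory is a partial over its Variable subclass, so .func recovers the type that the _columnTypes table is keyed on. This leans on the same implementation detail, so it may break with other Storm versions.

    from storm.info import get_cls_info
    from storm.locals import Storm, Int, Unicode

    class Person(Storm):
        __storm_table__ = "person"
        id = Int(primary=True)
        name = Unicode()

    for column in get_cls_info(Person).columns:
        # Prints e.g. "id <class 'storm.variables.IntVariable'>"
        print column.name, column.variable_factory.func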
Example 22
 def get_column_names(self):
     info = get_cls_info(self.orm_type)
     for name, attr in info.attributes.items():
         yield name
Example 23
    def __new__(cls, name, bases, dict):
        if Storm in bases or SQLObjectBase in bases:
            # Do not parse abstract base classes.
            return type.__new__(cls, name, bases, dict)

        style = cls._get_attr("_style", bases, dict)
        if style is None:
            dict["_style"] = style = SQLObjectStyle()

        table_name = cls._get_attr("_table", bases, dict)
        if table_name is None:
            table_name = style.pythonClassToDBTable(name)

        id_name = cls._get_attr("_idName", bases, dict)
        if id_name is None:
            id_name = style.idForTable(table_name)

        # Handle this later to call _parse_orderBy() on the created class.
        default_order = cls._get_attr("_defaultOrder", bases, dict)

        dict["__storm_table__"] = table_name

        attr_to_prop = {}
        for attr, prop in dict.items():
            attr_to_prop[attr] = attr
            if isinstance(prop, ForeignKey):
                db_name = prop.kwargs.get("dbName", attr)
                local_prop_name = style.instanceAttrToIDAttr(attr)
                dict[local_prop_name] = local_prop = Int(
                    db_name,
                    allow_none=not prop.kwargs.get("notNull", False),
                    validator=prop.kwargs.get("storm_validator", None))
                dict[attr] = Reference(local_prop,
                                       "%s.<primary key>" % prop.foreignKey)
                attr_to_prop[attr] = local_prop_name
            elif isinstance(prop, PropertyAdapter):
                db_name = prop.dbName or attr
                method_name = prop.alternateMethodName
                if method_name is None and prop.alternateID:
                    method_name = "by" + db_name[0].upper() + db_name[1:]
                if method_name is not None:

                    def func(cls, key, attr=attr):
                        store = cls._get_store()
                        obj = store.find(cls, getattr(cls, attr) == key).one()
                        if obj is None:
                            raise SQLObjectNotFound
                        return obj

                    func.func_name = method_name
                    dict[method_name] = classmethod(func)
            elif isinstance(prop, SQLMultipleJoin):
                # Generate addFoo/removeFoo names.
                def define_add_remove(dict, prop):
                    capitalised_name = (prop._otherClass[0].capitalize() +
                                        prop._otherClass[1:])

                    def add(self, obj):
                        prop._get_bound_reference_set(self).add(obj)

                    add.__name__ = "add" + capitalised_name
                    dict.setdefault(add.__name__, add)

                    def remove(self, obj):
                        prop._get_bound_reference_set(self).remove(obj)

                    remove.__name__ = "remove" + capitalised_name
                    dict.setdefault(remove.__name__, remove)

                define_add_remove(dict, prop)

        id_type = dict.setdefault("_idType", int)
        id_cls = {int: Int, str: RawStr, unicode: AutoUnicode}[id_type]
        dict["id"] = id_cls(id_name, primary=True, default=AutoReload)
        attr_to_prop[id_name] = "id"

        # Notice that obj is the class since this is the metaclass.
        obj = super(SQLObjectMeta, cls).__new__(cls, name, bases, dict)

        property_registry = obj._storm_property_registry

        property_registry.add_property(obj, getattr(obj, "id"),
                                       "<primary key>")

        # Let's explore this same mechanism to register table names,
        # so that we can find them to handle prejoinClauseTables.
        property_registry.add_property(obj, getattr(obj, "id"),
                                       "<table %s>" % table_name)

        for fake_name, real_name in attr_to_prop.items():
            prop = getattr(obj, real_name)
            if fake_name != real_name:
                property_registry.add_property(obj, prop, fake_name)
            attr_to_prop[fake_name] = prop

        obj._attr_to_prop = attr_to_prop

        if default_order is not None:
            cls_info = get_cls_info(obj)
            cls_info.default_order = obj._parse_orderBy(default_order)

        return obj
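
A hedged illustration of what the metaclass above synthesizes; Team and Person are hypothetical SQLObject-style declarations, and the generated attribute names follow the default SQLObjectStyle.

    from storm.sqlobject import SQLObjectBase, StringCol, ForeignKey

    class Team(SQLObjectBase):
        name = StringCol()

    class Person(SQLObjectBase):
        name = StringCol()
        team = ForeignKey(foreignKey="Team", dbName="team_id")

    # The metaclass fills in the Storm mapping: __storm_table__ is derived
    # from the class name, an "id" primary-key property is added
    # automatically, "teamID" holds the raw foreign-key column, and "team"
    # becomes a Storm Reference to Team's primary key.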
Example 24
    def __new__(cls, name, bases, dict):
        if Storm in bases or SQLObjectBase in bases:
            # Do not parse abstract base classes.
            return type.__new__(cls, name, bases, dict)

        style = cls._get_attr("_style", bases, dict)
        if style is None:
            dict["_style"] = style = SQLObjectStyle()

        table_name = cls._get_attr("_table", bases, dict)
        if table_name is None:
            table_name = style.pythonClassToDBTable(name)

        id_name = cls._get_attr("_idName", bases, dict)
        if id_name is None:
            id_name = style.idForTable(table_name)

        # Handle this later to call _parse_orderBy() on the created class.
        default_order = cls._get_attr("_defaultOrder", bases, dict)

        dict["__storm_table__"] = table_name

        attr_to_prop = {}
        for attr, prop in dict.items():
            attr_to_prop[attr] = attr
            if isinstance(prop, ForeignKey):
                db_name = prop.kwargs.get("dbName", attr)
                local_prop_name = style.instanceAttrToIDAttr(attr)
                dict[local_prop_name] = local_prop = Int(db_name)
                dict[attr] = Reference(local_prop,
                                       "%s.<primary key>" % prop.foreignKey)
                attr_to_prop[attr] = local_prop_name
            elif isinstance(prop, PropertyAdapter):
                db_name = prop.dbName or attr
                method_name = prop.alternateMethodName
                if method_name is None and prop.alternateID:
                    method_name = "by" + db_name[0].upper() + db_name[1:]
                if method_name is not None:

                    def func(cls, key, attr=attr):
                        store = cls._get_store()
                        obj = store.find(cls, getattr(cls, attr) == key).one()
                        if obj is None:
                            raise SQLObjectNotFound
                        return obj

                    func.func_name = method_name
                    dict[method_name] = classmethod(func)

        id_type = dict.get("_idType", int)
        id_cls = {int: Int, str: RawStr, unicode: AutoUnicode}[id_type]
        dict[id_name] = id_cls(primary=True)

        # Notice that obj is the class since this is the metaclass.
        obj = super(SQLObjectMeta, cls).__new__(cls, name, bases, dict)

        property_registry = obj._storm_property_registry

        property_registry.add_property(obj, getattr(obj, id_name),
                                       "<primary key>")

        for fake_name, real_name in attr_to_prop.items():
            prop = getattr(obj, real_name)
            if fake_name != real_name:
                property_registry.add_property(obj, prop, fake_name)
            attr_to_prop[fake_name] = prop

        obj._attr_to_prop = attr_to_prop

        if default_order is not None:
            cls_info = get_cls_info(obj)
            cls_info.default_order = obj._parse_orderBy(default_order)

        return obj