def do_delete(db):
    cursor = db.cursor()
    if isinstance(item, Entity):  # it's a spec
        # get a new version number for all changes we may need to make
        time = t if t is not None else datetime.now(utc)
        cursor.execute("""
            INSERT INTO asa_version (time, author, ipnr, comment, readonly)
            VALUES (%s,%s,%s,%s,%s)
            """, (to_utimestamp(time), author, remote_addr, comment, 0))
        version_id = db.get_last_id(cursor, 'asa_version')

        # change artifacts of the deleted spec to point to the "Instance" spec
        cursor.execute("""
            INSERT INTO asa_artifact (id, version_id, spec, title_expr)
            SELECT id, %s, %s, title_expr
            FROM (
                SELECT id, max(version_id), title_expr
                FROM asa_artifact
                WHERE spec=%s
                GROUP BY id
            )""", (version_id, Instance.get_name(), item.get_name()))

        # change specs inheriting from the deleted spec to inherit from "Instance" instead
        cursor.execute("""
            INSERT INTO asa_spec (name, version_id, base_class)
            SELECT name, %s, %s
            FROM (
                SELECT name, max(version_id)
                FROM asa_spec
                WHERE base_class=%s
                GROUP BY name
            )""", (version_id, Instance.get_name(), item.get_name()))

        # change attributes that had the deleted spec as type
        cursor.execute("""
            INSERT INTO asa_spec_attribute (spec_name, version_id, name, multplicity_low, multplicity_high, type, uiorder)
            SELECT spec_name, %s, name, multplicity_low, multplicity_high, %s, uiorder
            FROM (
                SELECT spec_name, max(version_id), name, multplicity_low, multplicity_high, uiorder
                FROM asa_spec_attribute
                WHERE type=%s
                GROUP BY spec_name, name
            )""", (version_id, Instance.get_name(), item.get_name()))

        # finally, delete the spec
        cursor.execute("DELETE FROM asa_spec_attribute WHERE spec_name=%s", (item.get_name(),))
        cursor.execute("DELETE FROM asa_spec WHERE name=%s", (item.get_name(),))
    else:  # it's an artifact
        cursor.execute("DELETE FROM asa_artifact_value WHERE artifact_id=%s", (item.get_id(),))
        cursor.execute("DELETE FROM asa_artifact_id WHERE id=%s", (item.get_id(),))
        cursor.execute("DELETE FROM asa_artifact WHERE id=%s", (item.get_id(),))
    self.pool.remove(item)
    self.env.log.info("Deleted item '%s'" % item.get_id())
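
# Hypothetical sketch (not part of the original source): do_delete above reads
# `item`, `t`, `author`, `remote_addr`, `comment` and `self` from an enclosing
# scope, so it is presumably defined as a closure inside a delete method and run
# in a transaction. Assuming Trac's 0.12-era with_transaction decorator, the
# enclosing method could look roughly like this (method and parameter names are
# illustrative only):
def delete_item(self, item, author, remote_addr, comment=None, t=None):
    from trac.db import with_transaction

    @with_transaction(self.env)
    def do_delete(db):
        pass  # body as in the closure above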

def load_spec(self, spec_name, db=None):
    # Ignore requests to load the top-most classes of the instantiation chain (Entity and Instance),
    # as these will not be persisted to the database and will always be available from the pool.
    if spec_name in (Entity.get_name(), Instance.get_name()):
        return
    if not db:
        db = self.env.get_read_db()
    version = self._get_latest_spec_version(spec_name, db)
    if version is None:
        raise ValueError("No version found for spec with name '%s'" % (spec_name,))

    # get the base class
    base_class = None
    cursor = db.cursor()
    rows = cursor.execute("""
        SELECT base_class
        FROM asa_spec
        WHERE name='%s' AND version_id='%d'
        GROUP BY name""" % (spec_name, version))
    base_class_name = rows.fetchone()
    if base_class_name is not None and len(base_class_name) > 0:
        base_class_name = base_class_name[0]
        # load base classes (recursively until the root is reached)
        if base_class_name == Instance.get_name():
            base_class = Instance
        else:
            self.load_spec(base_class_name, db)
            base_class = self.pool.get_item(base_class_name)
    bases = (base_class,) if base_class is not None else tuple()

    # get the attributes
    attributes = []
    cursor = db.cursor()
    rows = cursor.execute("""
        SELECT name, multplicity_low, multplicity_high, type, uiorder
        FROM asa_spec_attribute
        WHERE spec_name='%s' AND version_id='%d'""" % (spec_name, version))
    for row in rows.fetchall():
        attributes.append(Attribute(name=row[0], multiplicity=(row[1], row[2]), atype=row[3], order=row[4]))

    # create the entity
    spec = Entity(name=spec_name, bases=bases, version=version, persisted=True, attributes=attributes)
    if self.pool.get_item(spec.get_name()) is None:
        self.pool.add(spec)
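
# Hypothetical usage sketch (not part of the original source): load one persisted
# spec into a fresh pool and fetch the resulting Entity. The spec name
# 'Requirement' and the `env` parameter are placeholders; DBPool and InstancePool
# are used the same way as in the other functions in this file.
def _example_load_one_spec(env):
    dbp = DBPool(env, InstancePool())
    dbp.load_spec('Requirement')  # raises ValueError if no version of the spec exists
    return dbp.pool.get_item('Requirement')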

def get_list_search_no_spec(request, dbp, obj, resource):
    require_permission(request.req, resource, dbp.env)

    dbp.load_artifacts_of(Instance.get_name())
    artifacts_with_no_spec = dbp.pool.get_instances_of(Instance.get_name(), direct_instances_only=True)

    data = {
        'context': Context.from_request(request.req, resource),
        'action': 'list',
        'list_title': 'Artifacts without a spec',
        'spec': Instance,
        'artifacts': artifacts_with_no_spec,
    }
    return 'list_spec_artifacts_page.html', data, None

def get_list_search_by_filter(request, dbp, obj, resource):
    require_permission(request.req, resource, dbp.env)

    dbp.load_artifacts_of(Instance.get_name())
    artifacts_with_no_spec = dbp.pool.get_instances_of(Instance.get_name(), direct_instances_only=True)

    # track access
    dbp.track_it("pick_artifact", "", "view", request.req.authname, str(datetime.now()))

    data = {
        'context': Context.from_request(request.req, resource),
        'url_path': '',
    }
    return 'index_dialog.html', data, None
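
# Hypothetical dispatch sketch (not part of the original source): both view
# functions above return a Trac-style (template, data, content_type) tuple, so a
# request handler could presumably route to them by a symbolic action name. The
# action names and the _example_dispatch helper are illustrative only.
def _example_dispatch(action, request, dbp, obj, resource):
    views = {
        'list_no_spec': get_list_search_no_spec,
        'pick_artifact': get_list_search_by_filter,
    }
    template, data, content_type = views[action](request, dbp, obj, resource)
    return template, data, content_type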

def _upgrade_to_0dot3(self, db):
    cursor = db.cursor()
    cursor.execute("ALTER TABLE asa_artifact_wiki RENAME TO asa_artifact_wiki_references;")
    for table in schema:  # TODO: fix. reference to global var
        if table.name == "asa_artifact_artifact_references":
            self._create_table(table, cursor)
            break

    from AdaptiveArtifacts.persistence.data import DBPool
    from AdaptiveArtifacts.model.pool import InstancePool
    from AdaptiveArtifacts.model.core import Instance
    dbp = DBPool(self.env, InstancePool())
    dbp.load_specs()
    dbp.load_artifacts_of(Instance.get_name())
    for artifact in dbp.pool.get_instances_of(Instance.get_name()):
        dbp.update_artifact_ref_count(artifact, db)

    cursor.execute("UPDATE system SET value='0.3' WHERE name='%s'" % (self.db_key,))
    self.log.info('Upgraded ASA tables from versions 0.1/0.2 to 0.3')
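
# Hypothetical sketch (not part of the original source) of how _upgrade_to_0dot3
# might be driven from the plugin's environment setup participant. The version
# check of `self.db_key` against the `system` table mirrors the UPDATE statement
# above; initial table creation for fresh environments is omitted here.
def upgrade_environment(self, db):
    cursor = db.cursor()
    cursor.execute("SELECT value FROM system WHERE name=%s", (self.db_key,))
    row = cursor.fetchone()
    if row is not None and row[0] in ('0.1', '0.2'):
        self._upgrade_to_0dot3(db)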

def build_saved_and_reloaded_pool(testcase):
    testcase.env = EnvironmentStub(enable=['trac.*', 'AdaptiveArtifacts.*', 'AdaptiveArtifacts.persistence.db.*'])
    Setup(testcase.env).upgrade_environment(testcase.env.get_db_cnx())
    # this works as long as no one inherits from MetaModelInstancesStructureAfterLoad and ModelInstancesStructureAfterLoad
    super(testcase.__class__, testcase).setUp()

    dbp = DBPool(testcase.env, testcase.pool)
    dbp.save('anonymous', "", "120.0.0.1")

    new_pool = InstancePool()
    new_dbp = DBPool(testcase.env, new_pool)
    for instance in testcase.pool.get_instances_of(Instance.get_name()):
        new_dbp.load_artifact(instance.get_id())
    for entity in testcase.pool.get_instances_of(Entity.get_name()):
        new_dbp.load_spec(entity.get_name())
    testcase.pool = new_pool
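
# Hypothetical test-case sketch (not part of the original source): given the
# super(testcase.__class__, testcase).setUp() call above, the helper is presumably
# invoked from the setUp() of MetaModelInstancesStructureAfterLoad itself, whose
# parent class builds testcase.pool in memory. The base class name used here is
# illustrative only.
class MetaModelInstancesStructureAfterLoad(MetaModelInstancesStructure):
    def setUp(self):
        # the parent's setUp() builds self.pool; the helper saves it and swaps in
        # a pool reloaded from the stubbed database
        build_saved_and_reloaded_pool(self)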

def get_index(request, dbp, obj, resource):
    require_permission(request.req, resource, dbp.env)

    # Load *everything* TODO: make more efficient
    dbp.load_specs()
    dbp.load_artifacts_of(Instance.get_name())

    def get_spec_data(base_spec):
        specs = []
        for spec in sorted(dbp.pool.get_items((1,), base_spec), key=lambda spec: spec.get_name()):
            specs.append((spec, get_spec_data(spec.get_name()), len(dbp.pool.get_instances_of(spec.get_name()))))
        return specs
    specs = get_spec_data(Instance.get_name())

    spec_name = request.req.args.get('spec', None)
    selected_spec = dbp.pool.get_item(spec_name) if spec_name else None

    searches = [('no_spec', 'Artifacts with no Type', len(dbp.pool.get_instances_of(Instance.get_name(), direct_instances_only=True)))]
    selected_search = request.req.args.get('search', None)
    if selected_spec is None and selected_search is not None and selected_search == 'no_spec':
        selected_spec = Instance

    if selected_spec is None:
        spec_attrs = []
        artifacts_attrs = []
        artifacts_values = []
        artifacts_pages_count = {}
        artifacts_rel_artifact_count = {}
    else:
        if selected_search is not None and selected_search == 'no_spec':
            artifacts = dbp.pool.get_instances_of(selected_spec.get_name(), direct_instances_only=True)
        else:
            artifacts = dbp.pool.get_instances_of(selected_spec.get_name())

        # Get attributes defined at the spec level ...
        spec_attrs = [(attribute.name, attribute.owner_spec.get_name()) for attribute in selected_spec.get_attributes()]
        # ... and those that only exist at the level of the artifacts themselves
        keys_count = {}
        for a in artifacts:
            for k, v in a.get_values():
                if v:
                    if k in keys_count:
                        keys_count[k].append(v)
                    else:
                        keys_count[k] = [v]
        # first, order attributes by how many values there are for them. that being equal, order by attribute name
        all_values = sorted(keys_count.items(), key=lambda x: (len(x[1]) * -1, unicode.lower(x[0])))
        artifacts_attrs_names = [k for k, _ in all_values]
        for a_name, a_owner in spec_attrs:
            if a_name in artifacts_attrs_names:
                artifacts_attrs_names.remove(a_name)
        artifacts_attrs = [(a_name, None) for a_name in artifacts_attrs_names]

        # Build a matrix of the attribute values to be shown in the index page
        artifacts_values = []
        for artifact in artifacts:
            values = dict(artifact.get_values())
            ordered_values_lst = []
            for attributes in [spec_attrs, artifacts_attrs]:
                for attribute in attributes:
                    if attribute[0] in values:
                        data = values[attribute[0]]
                        if not type(data) is list:
                            data = [data]
                        joined = ", ".join(data)
                        joined = re.sub(r'(\[\[ASA\([0-9]+\)\]\])', '', joined)  # no ASA macro expansions in the listing
                        ordered_values_lst.append({"full": joined, "is_long": True if len(joined) > 40 else None})
                    else:
                        ordered_values_lst.append({"full": u"", "is_long": None})
            artifacts_values.append((artifact, ordered_values_lst))
        # Reorder the lines of the matrix so that artifacts with the first columns filled in appear first
        artifacts_values = sorted(artifacts_values, key=lambda x: tuple([unicode.lower(v["full"]) if v["full"] else 'zzzzzzzzzz' for v in x[1]]))

        # get a count of the number of pages and number of artifacts that are referenced by each artifact
        artifacts_pages_count = {}
        artifacts_rel_artifact_count = {}
        for artifact in artifacts:
            artifacts_pages_count[artifact] = len(list(dbp.get_wiki_page_ref_counts(artifact)))
            artifacts_rel_artifact_count[artifact] = len(list(dbp.get_related_artifact_ref_counts(artifact)))

    # track access
    dbp.track_it("index", "", "view", request.req.authname, str(datetime.now()))

    data = {
        'context': Context.from_request(request.req, resource),
        'action': 'list',
        'specs': specs,
        'searches': searches,
        'selected_spec': selected_spec,
        'selected_search': selected_search,
        'spec_columns': spec_attrs,
        'arti_columns': artifacts_attrs,
        'artifacts_values': artifacts_values,
        'artifacts_pages_count': artifacts_pages_count,
        'artifacts_rel_artifact_count': artifacts_rel_artifact_count,
    }
    return 'index_page.html', data, None
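
# Illustrative sketch (not part of the original source) of the shape of `specs`
# built by get_spec_data() above: a recursive list of (spec, child_entries,
# instance_count) tuples, one entry per spec directly derived from the given base.
# The helper name below is a placeholder.
def _example_walk_spec_tree(spec_entries, depth=0):
    for spec, children, count in spec_entries:
        print("%s%s (%d artifacts)" % ("  " * depth, spec.get_name(), count))
        _example_walk_spec_tree(children, depth + 1)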