Example #1
 def delete(self):
     if self.user_can_delete():
         with DelayCommit(db):
             # Clear any row stored under the negated counter for this seminar.
             db.talks.delete({
                 "seminar_id": self.seminar_id,
                 "seminar_ctr": -self.seminar_ctr
             })
             # Soft-delete the talk: keep the row, just set the flag.
             db.talks.update(
                 {
                     "seminar_id": self.seminar_id,
                     "seminar_ctr": self.seminar_ctr
                 }, {
                     "deleted": True,
                     "deleted_with_seminar": False
                 })
             # Strip this talk from every subscriber's talk_subscriptions list.
             for i, talk_sub in db._execute(
                     SQL("SELECT {},{} FROM {} WHERE {} ? %s").format(*map(
                         IdentifierWrapper,
                         [
                             "id", "talk_subscriptions", "users",
                             "talk_subscriptions"
                         ],
                     )),
                 [self.seminar.shortname],
             ):
                 if self.seminar_ctr in talk_sub[self.seminar.shortname]:
                     talk_sub[self.seminar.shortname].remove(
                         self.seminar_ctr)
                     db.users.update({"id": i},
                                     {"talk_subscriptions": talk_sub})
         self.deleted = True
         return True
     else:
         return False
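
Every one of these examples wraps its writes in DelayCommit, which batches the block's statements into a single transaction and commits only when the block exits cleanly; as the examples show, it can be constructed from the database itself or from an object that holds one (a table, or the user database). The sketch below is only an illustration of that pattern, assuming a plain psycopg2-style connection with autocommit off; psycodict's real DelayCommit differs in detail.

class DelayCommitSketch:
    """Commit once when the block exits cleanly; roll back if it raises."""

    def __init__(self, conn):
        self.conn = conn  # assumed: a connection exposing commit()/rollback()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            self.conn.commit()    # all queued writes become visible together
        else:
            self.conn.rollback()  # any error undoes the whole block
        return False              # propagate the original exception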
Example #2
 def delete(self):
     # We don't actually delete from the seminars and talks tables but instead just
     # set the deleted flag.  We actually delete from seminar_organizers and subscriptions
     # since these are less important.
     if self.user_can_delete():
         with DelayCommit(db):
             db.seminars.update({"shortname": self.shortname}, {"deleted": True})
             db.talks.update({"seminar_id": self.shortname, "deleted": False}, {"deleted": True, "deleted_with_seminar": True})
             # Remove this seminar from each user's seminar_subscriptions.
             for elt in db.users.search(
                 {"seminar_subscriptions": {"$contains": self.shortname}},
                 ["id", "seminar_subscriptions"],
             ):
                 elt["seminar_subscriptions"].remove(self.shortname)
                 db.users.update(
                     {"id": elt["id"]},
                     {"seminar_subscriptions": elt["seminar_subscriptions"]}
                 )
             # Drop each user's talk_subscriptions entry for this seminar.
             for i, talk_sub in db._execute(
                 SQL("SELECT {},{} FROM {} WHERE {} ? %s").format(
                     *map(
                         IdentifierWrapper,
                         ["id", "talk_subscriptions", "users", "talk_subscriptions"],
                     )
                 ),
                 [self.shortname],
             ):
                 del talk_sub[self.shortname]
                 db.users.update({"id": i}, {"talk_subscriptions": talk_sub})
         self.deleted = True
         return True
     else:
         return False
Example #3
 def make_creator(self, email, endorser):
     with DelayCommit(self):
         db.users.update({"email": ilike_query(email)}, {"creator": True, "endorser": endorser}, restat=False)
         # Update all of this user's created seminars and talks
         db.seminars.update({"owner": ilike_query(email)}, {"display": True})
         # Could do this with a join...
         for sem in seminars_search({"owner": ilike_query(email)}, "shortname"):
             db.talks.update({"seminar_id": sem}, {"display": True}, restat=False)
Example #4
 def delete(self, data):
     # We keep the uid in the users table (so that it doesn't get reused), but remove all personal information
     uid = data["id"]
     email = data["email"]
     with DelayCommit(db):
         # We probably have code that assumes that admin/owner isn't None....
         db.institutions.update({"admin": ilike_query(email)}, {"admin": "*****@*****.**"})
         db.seminars.update({"owner": ilike_query(email)}, {"owner": "*****@*****.**"})
         db.seminar_organizers.delete({"email": ilike_query(email)})
         db.talks.update({"speaker_email": ilike_query(email)}, {"speaker_email": ""})
         self.update({"id": uid}, {key: None for key in self.search_cols}, restat=False)
Example #5
 def save(self, data):
     data = dict(data)  # copy
     email = data.pop("email", None)
     if not email:
         raise ValueError("data must contain email")
     user = self.lookup(email)
     if not user:
         raise ValueError("user does not exist")
     if not data:
         raise ValueError("no data to save")
     if "new_email" in data:
         data["email"] = data.pop("new_email")
         try:
             # standardize email
             data["email"] = validate_email(data["email"])["email"]
         except EmailNotValidError as e:
             flash_error("""Oops, email '%s' is not allowed. %s""",
                         data["email"], str(e))
             return False
         if self.user_exists(data["email"]):
             flash_error(
                 "There is already a user registered with email = %s",
                 data["email"])
             return False
     for key in list(data):
         if key not in self.search_cols:
             if key != "id":
                 from .app import app
                 app.logger.critical(
                     "Need to update pwdmanager code to account for schema change key=%s"
                     % key)
             data.pop(key)
     with DelayCommit(db):
         if "email" in data:
             newemail = data["email"]
             db.institutions.update({"admin": ilike_query(email)},
                                    {"admin": newemail})
             db.seminars.update({"owner": ilike_query(email)},
                                {"owner": newemail})
             db.seminar_organizers.update({"email": ilike_query(email)},
                                          {"email": newemail})
             db.talks.update({"speaker_email": ilike_query(email)},
                             {"speaker_email": newemail})
         self.update({"email": ilike_query(email)}, data, restat=False)
     return True
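
The "new_email" branch above standardizes the new address and then rewrites it in every table where the old one appears before updating the user row itself. A call might look like the line below; the userdb name is only a stand-in for whatever object carries this save method.

userdb.save({"email": "old@example.org", "new_email": "new@example.org"})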
Example #6
    def add_topics(self, filename, dryrun=False):
        """
        File format: one line for each topic, asterisks to indicate children, tilde for dividing topic id from topic name.
        You can include a topic that already exists in order to add children, and you can include a topic multiple times to get the DAG structure.
        Example:

        chem ~ Chemistry
        * bio_BC ~ biochemistry
        ** bio_EZ ~ enzymology
        bio ~ Biology
        * bio_BC ~ biochemistry
        math_NT ~ number theory
        * math_AR ~ arithmetic geometry
        math_AG ~ algebraic geometry
        * math_AR ~ arithmetic geometry
        """
        existing_topics = dict(self.by_id)
        new_topics = {}
        children = defaultdict(set)
        children.update({
            tid: set(WT.id for WT in self.by_id[tid].children)
            for tid in self.by_id
        })
        update_children = set()
        current_path = []
        with open(filename) as F:
            for line in F:
                m = re.match(r"^([*\s]*)(.*)", line)
                depth = m.group(1).count("*")
                if depth > len(current_path):
                    raise ValueError(
                        "Invalid tree structure: can only indent one level at a time"
                    )
                content = m.group(2).split("~")
                if len(content) != 2:
                    raise ValueError("You must specify both id and name: %s" %
                                     content)
                topic_id = content[0].strip()
                topic_name = content[1].strip()
                current_path = current_path[:depth]
                if topic_id in existing_topics:
                    old_name = existing_topics[topic_id].name
                    if topic_name != old_name:
                        raise ValueError(
                            "Inconsistent topic name: %s (new) vs %s (existing)"
                            % (topic_name, old_name))
                else:
                    new_topics[topic_id] = topic_name
                    existing_topics[topic_id] = WebTopic(topic_id, topic_name)
                if current_path:
                    if topic_id not in children[current_path[-1]]:
                        update_children.add(current_path[-1])
                    children[current_path[-1]].add(topic_id)
                current_path.append(topic_id)
        topic_list = [{
            "topic_id": tid,
            "name": name,
            "children": sorted(children[tid])
        } for (tid, name) in new_topics.items()]
        updates = {
            tid: sorted(children[tid])
            for tid in update_children if tid not in new_topics
        }
        print("New topics being added:\n  %s" %
              ("\n  ".join(T["name"] for T in topic_list)))
        print("Child relationships being added:")
        for T in topic_list:
            for C in T["children"]:
                print("  %s -> %s" % (T["name"], existing_topics[C].name))
        for tid, children in updates.items():
            for C in children:
                if C not in self.by_id[tid].children:
                    print("  %s -> %s" %
                          (existing_topics[tid].name, existing_topics[C].name))
        if not dryrun:
            with DelayCommit(db):
                db.new_topics.insert_many(topic_list)
                for tid, children in updates.items():
                    db.new_topics.update({"topic_id": tid},
                                         {"children": children})
Example #7
def write_content_table(data_folder, table, query, selecter, approve_row, users, sep):
    now_overall = time.time()
    print("Exporting %s..." % (table.search_table))
    # The SQL queries for talks and seminars are different
    tablename = table.search_table
    if table in [db.talks, db.seminars]:
        cols = SQL(", ").join(map(IdentifierWrapper, ["id"] + table.search_cols))
        query = SQL(query)
        selecter = selecter.format(cols, cols, IdentifierWrapper(tablename), query)

    searchfile = os.path.join(data_folder, tablename + ".txt")


    header = sep.join(["id"] + table.search_cols) + "\n" + sep.join(["bigint"] + [table.col_type[col] for col in table.search_cols]) + "\n\n"
    table._copy_to_select(selecter, searchfile, header, sep=sep)
    safe_cols = ["id"] + [col for col in table.search_cols if col in whitelisted_cols]
    clear_private_data(searchfile, safe_cols, approve_row, users, sep)

    # do the other files

    from psycodict.table import _counts_cols, _stats_cols
    from psycodict.base import _meta_indexes_cols, _meta_constraints_cols, _meta_tables_cols
    statsfile = None #os.path.join(data_folder, tablename + "_stats.txt")
    countsfile = None #os.path.join(data_folder, tablename + "_counts.txt")
    indexesfile = os.path.join(data_folder, tablename + "_indexes.txt")
    constraintsfile = os.path.join(data_folder, tablename + "_constraints.txt")
    metafile = os.path.join(data_folder, tablename + "_meta.txt")
    tabledata = [
        # tablename, cols, addid, write_header, filename
        (table.stats.counts, _counts_cols, False, False, countsfile),
        (table.stats.stats, _stats_cols, False, False, statsfile),
    ]

    metadata = [
        ("meta_indexes", "table_name", _meta_indexes_cols, indexesfile),
        ("meta_constraints", "table_name", _meta_constraints_cols, constraintsfile),
        ("meta_tables", "name", _meta_tables_cols, metafile),
    ]

    with DelayCommit(table):
        for tbl, cols, addid, write_header, filename in tabledata:
            if filename is None:
                continue
            now = time.time()
            if addid:
                cols = ["id"] + cols
            cols_wquotes = ['"' + col + '"' for col in cols]
            cur = table._db.cursor()
            with open(filename, "w") as F:
                try:
                    if write_header:
                        table._write_header_lines(F, cols, sep=sep)
                    cur.copy_to(F, tbl, columns=cols_wquotes, sep=sep)
                except Exception:
                    table.conn.rollback()
                    raise
            print(
                "\tExported %s in %.3f secs to %s"
                % (tbl, time.time() - now, filename)
            )

        for tbl, wherecol, cols, filename in metadata:
            if filename is None:
                continue
            now = time.time()
            cols = SQL(", ").join(map(Identifier, cols))
            select = SQL("SELECT {0} FROM {1} WHERE {2} = {3}").format(
                cols,
                Identifier(tbl),
                Identifier(wherecol),
                Literal(table.search_table),
            )
            table._copy_to_select(select, filename, silent=True, sep=sep)
            print(
                "\tExported data from %s in %.3f secs to %s"
                % (tbl, time.time() - now, filename)
            )

        print(
            "Exported %s in %.3f secs"
            % (table.search_table, time.time() - now_overall)
        )
Example #8
 def save_organizers(self):
     # Need to allow for deleting organizers, so we delete them all then add them back
     with DelayCommit(db):
         db.seminar_organizers.delete({"seminar_id": self.shortname})
         db.seminar_organizers.insert_many(self.organizers)
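
The delete-then-reinsert runs inside one DelayCommit block, so readers never see the seminar with a partially written organizer list. A hypothetical call site might edit the in-memory list and then persist it as below; seminars_lookup and the organizer-dict shape are assumptions, not part of the example above.

seminar = seminars_lookup("my_seminar")  # assumed lookup helper
seminar.organizers = [
    org for org in seminar.organizers
    if org.get("email") != "departed@example.org"
]
seminar.save_organizers()  # deletes the old rows, re-inserts what remains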