Example #1
0
def test_role_activities(default_db):
    """Insert one role, one activity, and a row linking them, then read
    the role back together with its joined activities and verify all
    three records round-trip intact.
    """
    with ConnectionMgr.session() as session:
        # Insert a role and remember its generated primary key.
        role_ = {"name": "user"}
        role_["id"] = session.execute(insert(role,
                                             role_)).inserted_primary_key[0]
        # Insert an activity the same way.
        activity_ = {"name": "login"}
        activity_["id"] = session.execute(insert(
            activity, activity_)).inserted_primary_key[0]
        # Link the two in the association table.
        # NOTE(review): the activity id is stored under "roles_id" and the
        # role id under "role_id"; the join below matches this mapping
        # (activity.c.id == activity_roles.c.roles_id), but the column
        # naming looks swapped -- confirm it is intentional in the schema.
        session.execute(
            insert(
                activity_roles,
                {
                    "roles_id": activity_["id"],
                    "role_id": role_["id"]
                },
            ))
        session.commit()

        # Read the role back, then fetch its activities via the link table.
        role_ = dict(session.execute(select([role])).first())
        rows = session.execute(
            select([activity],
                   activity_roles.c.role_id == role_["id"]).select_from(
                       activity.join(
                           activity_roles,
                           activity.c.id == activity_roles.c.roles_id)))
        activities = [dict(row) for row in rows]
        assert role_["name"] == "user"
        assert activities[0] == activity_
        assert activities[0]["name"] == "login"
Example #2
0
    def _populate_index(self, index_table, fields):
        """Insert or update this object's row in *index_table*.

        Collects the value of each field in *fields* from the instance
        (falsy values become ''), prefixes the primary key, then issues
        an UPDATE when a row with this id already exists, or an INSERT
        otherwise.  Silently does nothing when any requested field is
        missing from the instance (incomplete object -- do not index
        partial data).
        """
        values = []
        for field in fields:
            if field not in self.__dict__:
                return
            values.append(getattr(self, field) or '')

        # BUG FIX: on Python 3, zip() returns an iterator, so the original
        # ``values = zip(...); values.insert(0, ...)`` raised
        # AttributeError.  Build the dict directly, keeping the original
        # ordering semantics (a field named 'id' still overrides self.id,
        # as dict() keeps the last occurrence).
        values = dict([('id', self.id)] + list(zip(fields, values)))

        index = list(self.__class__._find_index(index_table))
        index.insert(0, 'id')
        # Pair each indexed column with its value by *name*, so the
        # clauses stay correct regardless of the column ordering in the
        # table definition (the original zip of a filtered column list
        # against index-ordered values could silently mismatch).
        clauses = [index_table.c[name] == values[name] for name in index]

        row_count = select([func.count(index_table.c.id)
                            ]).where(and_(*clauses)).scalar()

        if row_count:
            update(index_table).where(index_table.c.id == self.id).values(
                **values).execute()
        else:
            insert(index_table, values=values).execute()
Example #3
0
 def insert_query(self, insert_data: Union[List[Dict], Dict]) -> 'Query':
     """Build an INSERT statement for the bound model.

     Args:
         insert_data: one row mapping or a list of row mappings; the
             model's default values are merged in under each row, with
             caller-supplied keys taking precedence.
     Returns:
         self, with the built statement stored on ``self._query_obj`` and
         the full merged row data on ``self._insert_data``.
     Raises:
         QueryArgsError: when SQLAlchemy rejects the column arguments.
     """
     self._verify_model()
     try:
         insert_data_: Union[List[Dict], Dict]
         if isinstance(insert_data, dict):
             # Single row: defaults first, explicit values win.
             insert_data_ = {
                 **self._get_model_default_value(),
                 **insert_data
             }
             query = insert(self._model).values(insert_data_)
         else:
             insert_data_ = [{
                 **self._get_model_default_value(),
                 **one_data
             } for one_data in insert_data]
             # NOTE(review): only the first row is bound into the statement
             # here; presumably the full ``self._insert_data`` list is
             # supplied at execution time (executemany style) -- confirm
             # against the executor before changing.
             query = insert(self._model).values(insert_data_[0])
     except SQLAlchemyError as e:
         aelog.exception(e)
         raise QueryArgsError(
             message="Cloumn args error: {}".format(str(e)))
     else:
         self._query_obj, self._insert_data = query, insert_data_
         return self
async def test_retry_on_deadlock_rollback(system):
    """Verify that when one statement of a retried batch aborts the
    transaction, none of the batch's inserts are persisted.
    """
    if not isinstance(system, DatabaseACESystem):
        pytest.skip("database-only test")

    async with system.get_db() as db:
        # SQLite trigger that forces a ROLLBACK (surfacing as an
        # IntegrityError) as soon as the key 'test2' is inserted.
        await db.execute(
            text(
                """CREATE TRIGGER trigger_deadlock BEFORE INSERT ON `config` BEGIN 
            SELECT CASE WHEN NEW.key = 'test2' THEN RAISE ( ROLLBACK, 'DEADLOCK' ) END; END"""
            )
        )
        await db.commit()

    # The second insert trips the trigger, which must abort the whole batch.
    with pytest.raises(IntegrityError):
        await system.retry_on_deadlock(
            [
                insert(Config).values(key="test", value="value"),
                insert(Config).values(key="test2", value="value2"),
            ],
            commit=True,
        )

    # neither of these should be set since the entire transaction was rolled back
    async with system.get_db() as db:
        assert (await db.execute(select(Config).where(Config.key == "test"))).one_or_none() is None
        assert (await db.execute(select(Config).where(Config.key == "test2"))).one_or_none() is None
Example #5
0
 def fill_geo(self, election, i=0, parent_container_id=None):
   """Recursively load the geographic hierarchy from *election*.

   For the current level (self.levels[i] / self.titles[i]) this inserts
   one 'geo' row per named area, that area's candidates and voter
   totals, and then recurses into the next level using the new row's id
   as the parent container.
   """
   level = self.levels[i]
   title = self.titles[i]
   # dict.has_key() was removed in Python 3; the `in` operator is
   # equivalent and works on both Python 2 and 3.
   if title in election:
     for name, contents in election[title].items():
       values = {'container_id': parent_container_id,
                 'name': name, 'type': level}
       r1 = self.session.execute(sql.insert(self.tables['geo'], values))
       my_id = r1.inserted_primary_key[0]
       if 'candidates' in contents:
         for position, candidates in contents['candidates'].items():
           for candidate in candidates:
             values = {'name': candidate['name'],
                       'party_id': self.parties[candidate['party']],
                       'position': position,
                       'container_id': my_id}
             self.session.execute(sql.insert(self.tables['candidates'],
                                             values))
       if 'voters' in contents:
         values = {'voting_center_id': my_id,
                   'total_votes': contents['voters']
                  }
         self.session.execute(sql.insert(self.tables['votes_check'], values))
       if i < len(self.levels) - 1:
         self.fill_geo(contents, i + 1, my_id)
Example #6
0
    async def test_commit(self, engines, binds):
        """Committed rows must persist across sessions, including rows
        committed before an exception aborts the session block.
        """
        test_table1 = self.test_models['db1'].test_table1
        test_table2 = self.test_models['db2'].test_table2
        async with Session(engines, binds) as session:
            q = sql.insert(test_table1).values(id=5, title='test_title')
            await session.execute(q)
            q = sql.insert(test_table2).values(id=10, title='test_title2')
            await session.execute(q)
        # A fresh session must see both rows.
        async with Session(engines, binds) as session:
            q = sql.select(test_table1.c).where(test_table1.c.id == 5)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)
            q = sql.select(test_table2.c).where(test_table2.c.id == 10)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)

        try:
            async with Session(engines, binds) as session:
                q = sql.insert(test_table1).values(id=5, title='test_title')
                await session.execute(q)
                session.commit()
                q = sql.insert(test_table2).values(id=10, title='test_title2')
                await session.execute(q)
                session.commit()
                # Abort the session after both commits; the committed rows
                # must still be visible below.
                raise Exception
        except Exception:
            # Was a bare `except:`, which would also swallow SystemExit and
            # KeyboardInterrupt; Exception covers everything raised above.
            pass

        async with Session(engines, binds) as session:
            q = sql.select(test_table1.c).where(test_table1.c.id == 5)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)
            q = sql.select(test_table2.c).where(test_table2.c.id == 10)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)
Example #7
0
def addingredients():
    """Render the add-ingredient form (GET) and handle its submission (POST).

    A POST rejects case-insensitive duplicates, stores the ingredient,
    links it to the logged-in user, and re-renders the form.
    """
    if request.method != "POST":
        return render_template("addingredients.html")

    # Normalise the submitted ingredient data.
    food_item = request.form.get("fooditem").title()
    food_type = request.form.get("foodtype").title()

    # Reject duplicates, comparing case-insensitively against every
    # existing ingredient name.
    for existing in db.engine.execute("SELECT ingredient_name FROM ingredients"):
        if existing[0].lower() == food_item.lower():
            return apology("That ingredient already exists!")

    # Store the new ingredient.
    ingredients_table = Table('ingredients', metadata, autoload=True)
    sqlsession.execute(insert(ingredients_table).values({
        "ingredient_name": food_item,
        "ingredient_type": food_type,
        "ingredient_approved": True,  # MUST BE MADE FALSE BEFORE RELEASE
    }))

    # Recover the id the database just assigned.
    for row in sqlsession.execute("SELECT LAST_INSERT_ID() AS ingredient_id"):
        ingredientid = row['ingredient_id']

    # Link the new ingredient to the logged-in user.
    users_ingredients_table = Table('users_ingredients', metadata, autoload=True)
    sqlsession.execute(insert(users_ingredients_table).values({
        "user_id": session.get("user_id"),
        "ingredient_id": ingredientid,
    }))

    sqlsession.commit()
    return render_template("addingredients.html")
Example #8
0
def sendtype():
    """Store the pending ingredient (name stashed in the session) with
    the type just submitted, link it to the user's list, and return to
    the list page.
    """
    # The ingredient name was placed in the session by an earlier step;
    # only the type comes from this form.
    food_item = session.get('food_item', None)
    food_type = request.form.get("foodtype").title()

    # Store the ingredient itself.
    ingredients_table = Table('ingredients', metadata, autoload=True)
    sqlsession.execute(insert(ingredients_table).values({
        "ingredient_name": food_item,
        "ingredient_type": food_type,
        "ingredient_approved": True,  # MUST BE MADE FALSE BEFORE RELEASE
    }))

    # Recover the id the database just assigned.
    for row in sqlsession.execute("SELECT LAST_INSERT_ID() AS ingredient_id"):
        ingredientid = row['ingredient_id']

    # Attach the ingredient to the current user's list.
    users_lists_table = Table('users_lists', metadata, autoload=True)
    sqlsession.execute(insert(users_lists_table).values({
        "user_id": session.get("user_id"),
        "ingredient_id": ingredientid,
    }))

    sqlsession.commit()
    return redirect("/mylist")
Example #9
0
    async def test_commit(self, engines, binds):
        """Committed rows must persist across sessions, including rows
        committed before an exception aborts the session block.
        """
        test_table1 = self.test_models['db1'].test_table1
        test_table2 = self.test_models['db2'].test_table2
        async with Session(engines, binds) as session:
            q = sql.insert(test_table1).values(id=5, title='test_title')
            await session.execute(q)
            q = sql.insert(test_table2).values(id=10, title='test_title2')
            await session.execute(q)
        # A fresh session must see both rows.
        async with Session(engines, binds) as session:
            q = sql.select(test_table1.c).where(test_table1.c.id == 5)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)
            q = sql.select(test_table2.c).where(test_table2.c.id == 10)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)

        try:
            async with Session(engines, binds) as session:
                q = sql.insert(test_table1).values(id=5, title='test_title')
                await session.execute(q)
                session.commit()
                q = sql.insert(test_table2).values(id=10, title='test_title2')
                await session.execute(q)
                session.commit()
                # Abort the session after both commits; the committed rows
                # must still be visible below.
                raise Exception
        except Exception:
            # Was a bare `except:`, which would also swallow SystemExit and
            # KeyboardInterrupt; Exception covers everything raised above.
            pass

        async with Session(engines, binds) as session:
            q = sql.select(test_table1.c).where(test_table1.c.id == 5)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)
            q = sql.select(test_table2.c).where(test_table2.c.id == 10)
            rows = await session.execute(q)
            self.assertEqual(rows.rowcount, 1)
Example #10
0
 def record_call(self, existing_id, base_id, user_id, stage, status_code, response, time_elapsed, versions):
     """Persist one DeepCite interaction.

     A fresh lookup (existing_id is None) is written to the call table
     with the full JSON response; a follow-up retrieval is written to
     the retrieval table with a link back to the original call.
     Failures are reported to stdout and swallowed (best effort).
     """
     try:
         with self.conn.connect() as cur:
             if existing_id is None:  # This is a new entry
                 stmt = insert(self.deepcite_call_table).values(
                     id=base_id,
                     user_id=user_id,
                     stage=stage,
                     status_code=status_code,
                     response=json.dumps(response),
                     response_time_elapsed=time_elapsed,
                     current_versions=json.dumps(versions),
                 )
             else:
                 # Follow-up retrieval: link back to the original call.
                 stmt = insert(self.deepcite_retrieval_table).values(
                     id=base_id,
                     user_id=user_id,
                     deepcite_call_id=existing_id,
                     stage=stage,
                     status_code=status_code,
                     response_time_elapsed=time_elapsed,
                     current_versions=json.dumps(versions),
                 )
             cur.execute(stmt)
     except Exception as err:
         # Deliberate best-effort logging: never fail the caller.
         print("ERROR: Unexpected error: Could not commit to database instance.")
         print(err)
Example #11
0
def is1111process_view(request):
    """Run is1111 in-silico typing on an uploaded FASTA file (or pasted
    FASTA text), record the submission and profile, and redirect to the
    result page.

    Raises:
        HTTPNotFound: when neither a file field nor a paste field was posted.
        HTTPNotAcceptable: when the ePCR output cannot be parsed.
    """
    VP = views_processor.ViewProcessor()
    if "fastafile" not in request.POST or "fastaentry" not in request.POST:
        raise HTTPNotFound()
    filename = ""
    process_ID = uuid.uuid4().hex
    try:
        filename = request.POST["fastafile"].filename
    except Exception:
        # No file was uploaded (the field is then a plain string with no
        # .filename); fall through to the pasted-sequence path.
        pass
    # BUG FIX: was `filename is not ""` -- an identity comparison, which is
    # unreliable for strings and a SyntaxWarning on modern Python.
    if filename != "":
        inputfile = request.POST["fastafile"].file
        file_path = VP.create_file_from_fastafile(inputfile, process_ID,
                                                  "sole")
    else:
        sequence = memoryview(request.POST["fastaentry"].encode("utf-8"))
        file_path = VP.create_file_from_fastaentry(sequence, process_ID)
    command = VP.create_epcr_command(file_path, process_ID, "sole", "is1111")
    subprocess.call(command)
    try:
        is1111_dict = VP.extract_is1111_values(process_ID, "sole")
    except Exception:
        # ePCR produced no parseable output for this input.
        raise HTTPNotAcceptable()
    submission_dict = {
        "ID": process_ID,
        "AnalysisType": "is1111 Insilico typing",
        "IPaddress": request.remote_addr,
    }
    session = request.db2_session
    session.execute(insert(models.SubmissionTable).values([submission_dict]))
    session.execute(insert(models.is1111Profile).values([is1111_dict]))
    session.commit()
    url = request.route_url("resis1111", ID=process_ID)
    return HTTPFound(location=url)
Example #12
0
 async def create_manga(self, *args, **kwargs):
     """
     Creates a new manga entry. ID is set by the database.
     Args (* means mandatory):
         type: Manga, Webtoon. Defaults to Manga
         *country_of_origin (str) : ISO-3166 Country Code
         *publication_status (str): Ongoing, Axed, Completed
         *scanlation_status (bool): Is completely scanlated
         mal_id (int): MyAnimeList ID
         anilist_id (int): AniList ID
         mangaupdates_id (int): MangaUpdates ID
         *titles / *authors / *artists / *genres: iterables of strings

     Returns the database id of the newly created manga row.
     """
     # Savepoint so the author/artist/genre inserts below can be rolled
     # back as a unit if any of them fails.
     nested = await self.session.begin_nested()
     statement = manga.Manga(
         content_type=manga.Types[get_mandatory_parameter(
             kwargs, "type", str)],
         # Country code is truncated to its 2-letter ISO form.
         country_of_origin=get_mandatory_parameter(kwargs,
                                                   "country_of_origin",
                                                   str).strip()[0:2],
         publication_status=manga.PubStatuses[get_mandatory_parameter(
             kwargs, "publication_status", str)],
         scanlation_status=get_mandatory_parameter(kwargs,
                                                   "scanlation_status",
                                                   bool),
         # External-site ids default to 0 when absent or falsy.
         mal_id=int(kwargs.get("mal_id", 0) or 0),
         anilist_id=int(kwargs.get("anilist_id", 0) or 0),
         mu_id=int(kwargs.get("mangaupdates_id", 0) or 0),
         titles=[
             manga.Title(title=i)
             for i in get_mandatory_parameter(kwargs, "titles")
         ])
     async with self.lock:
         self.session.add(statement)
         try:
             # NOTE(review): statement.id is read below before any explicit
             # flush; presumably _get_or_insert flushes the session so the
             # id is populated -- confirm before refactoring.
             authors = get_mandatory_parameter(kwargs, "authors")
             for a in authors:
                 res = await self._get_or_insert(creators.Person, name=a)
                 await self.session.execute(
                     insert(creators.Author).values(manga_id=statement.id,
                                                    person_id=res.id))
             artists = get_mandatory_parameter(kwargs, "artists")
             for a in artists:
                 res = await self._get_or_insert(creators.Person, name=a)
                 await self.session.execute(
                     insert(creators.Artist).values(manga_id=statement.id,
                                                    person_id=res.id))
             genres = get_mandatory_parameter(kwargs, "genres")
             for g in genres:
                 res = await self._get_or_insert(manga.Genre, name=g)
                 await self.session.execute(
                     insert(manga.MangaGenre).values(manga_id=statement.id,
                                                     genre_id=res.id))
         except Exception as e:
             # Undo the partial relationship inserts, then re-raise.
             await nested.rollback()
             raise e
         await self.session.commit()
         return statement.id
Example #13
0
def addrecipes():
    """Render the add-recipe form (GET) or store a submitted recipe (POST).

    On POST: rejects duplicate recipe names, ensures the cuisine, dish
    type, and (optional) chef exist in their lookup tables, inserts the
    recipe, and links it to the logged-in user.
    """
    if request.method == "POST":
        recipe = request.form.get("recipe").title()
        cuisine = request.form.get("cuisine").title()
        dish_type = request.form.get("dishtype").title()
        # BUG FIX: these fields are optional, so .get() may return None,
        # and None has no .title(); only normalise when a value exists.
        chef = request.form.get("chef", None)
        if chef is not None:
            chef = chef.title()
        link = request.form.get("link", None)
        page = request.form.get("page", None)
        time = request.form.get("time", None)
        complexity = request.form.get("complexity", None)
        if complexity is not None:
            complexity = complexity.title()

        # making sure that data being inputted to database is defined
        for item in db.engine.execute("SELECT recipe_name FROM recipes"):
            if recipe.lower() == item[0].lower():
                return render_template("apology.html", message="That recipe already exists!")

        # checks if cuisine exists in 'cuisines' table and INSERTS it if not
        cuisineid = insertifnot("cuisine", cuisine)
        dish_typeid = insertifnot("dish_type", dish_type)
        if chef is None:  # was `== None`; identity test is the idiom
            chefid = None
        else:
            chefid = insertifnot("chef", chef)

        sqlsession.commit()
        # insert data from HTML form
        sqlsession.execute(insert(Table('recipes', metadata, autoload=True)).values({
            "recipe_name": recipe,
            "cuisine_id": cuisineid,
            "dish_type_id": dish_typeid,
            "chef_id": chefid,
            "link_book": link,
            "page_number": page,
            "time": time,
            "complexity": complexity,
            "recipe_approved": True  ############## MUST BE MADE FALSE BEFORE RELEASE ###########
        }))

        # Find out the recipe_id the database just assigned
        lastid = sqlsession.execute("SELECT LAST_INSERT_ID() AS recipe_id")
        for row in lastid:
            recipeid = row['recipe_id']

        # link the new recipe to the logged-in user
        sqlsession.execute(insert(Table('users_recipes', metadata, autoload=True)).values({
            "user_id": session.get("user_id"),
            "recipe_id": recipeid
        }))

        # commit changes to the database
        sqlsession.commit()

        # return redirect("/appendingredients") ####### only once appendingredients is completed can this be used ###########
        return redirect("/")
    else:
        return render_template("addrecipes.html")
async def test_retry_multi_sql_on_deadlock(system):
    """Smoke test: retry_multi_sql_on_deadlock accepts a batch of insert
    statements and commits them without raising."""
    if not isinstance(system, DatabaseACESystem):
        pytest.skip("database-only test")

    statements = [
        insert(Config).values(key="test", value="value"),
        insert(Config).values(key="test2", value="value2"),
    ]
    await system.retry_multi_sql_on_deadlock(statements, commit=True)
Example #15
0
async def create(async_db: Database, *, user_in: UserCreate) -> User:
    """Insert a new user row built from *user_in*.

    The plaintext password is hashed before storage.

    NOTE(review): the annotation says ``User`` but the value returned is
    whatever ``execute`` yields (typically the new primary key) -- confirm
    the intended contract with callers.
    """
    user = User(
        email=user_in.email,
        hashed_password=get_password_hash(user_in.password),
        full_name=user_in.full_name,
        is_superuser=user_in.is_superuser,
    )
    # BUG FIX: ``databases``-style execute takes the bind parameters as a
    # mapping (``values=``), not as keyword arguments; expanding
    # ``**user.dict()`` raised TypeError on unexpected keywords.  The
    # stray debug ``print`` has also been removed.
    res = await async_db.execute(insert(UserORM), values=user.dict())
    return res
async def test_retry_on_deadlock_multi_executable(system):
    """retry_on_deadlock must execute every statement in the batch and,
    with commit=True, persist all of them.
    """
    if not isinstance(system, DatabaseACESystem):
        pytest.skip("database-only test")

    await system.retry_on_deadlock(
        [
            insert(Config).values(key="test", value="value"),
            insert(Config).values(key="test2", value="value2"),
        ],
        commit=True,
    )

    # Both rows must be present after the batch committed.
    async with system.get_db() as db:
        assert (await db.execute(select(Config).where(Config.key == "test"))).scalar().value == "value"
        assert (await db.execute(select(Config).where(Config.key == "test2"))).scalar().value == "value2"
Example #17
0
    async def _test_execute(self, engines, binds):
        """Round-trip CRUD check within a single session: INSERT a row,
        SELECT it back, UPDATE it, verify, DELETE it, verify it is gone.
        """
        test_table1 = self.test_models['db1'].test_table1
        # NOTE(review): test_table2 is fetched but never used in this helper.
        test_table2 = self.test_models['db2'].test_table2
        async with Session(engines, binds) as session:
            # INSERT and check the reported row id.
            q = sql.insert(test_table1).values(id=5, title='test_title')
            result = await session.execute(q)
            self.assertEqual(result.lastrowid, 5)
            q = sql.select(test_table1.c).where(test_table1.c.id == 5)
            result = await session.execute(q)
            self.assertEqual(result.rowcount, 1)
            result = list(result)
            self.assertEqual(result[0]['id'], 5)
            self.assertEqual(result[0]['title'], 'test_title')

            # UPDATE the title and confirm the new value is visible.
            q = sql.update(test_table1).where(test_table1.c.id == 5).\
                    values(title='test_title2')
            result = await session.execute(q)
            self.assertEqual(result.rowcount, 1)
            q = sql.select(test_table1.c).\
                    where(test_table1.c.id == 5)
            result = await session.execute(q)
            self.assertEqual(result.rowcount, 1)
            result = list(result)
            self.assertEqual(result[0]['id'], 5)
            self.assertEqual(result[0]['title'], 'test_title2')

            # DELETE the row and confirm it no longer matches.
            q = sql.delete(test_table1).where(test_table1.c.id == 5)
            result = await session.execute(q)
            self.assertEqual(result.rowcount, 1)
            q = sql.select(test_table1.c).\
                    where(test_table1.c.id == 5)
            result = await session.execute(q)
            self.assertEqual(result.rowcount, 0)
Example #18
0
def upgrade(migrate_engine):
    """Schema migration: create the Discussion/Comment tables and attach
    a discussion_id to every existing Artwork and User row.
    """
    TableBase.metadata.bind = migrate_engine

    Discussion.__table__.create()
    Comment.__table__.create()

    # Add the new columns as nullable first, so existing rows stay valid
    # until each one has been given a discussion below.
    Artwork.__table__.c.discussion_id.nullable = True
    Artwork.__table__.c.discussion_id.create()
    User.__table__.c.discussion_id.nullable = True
    User.__table__.c.discussion_id.create()

    # Create a new discussion for each artwork and user
    conn = migrate_engine.connect()
    tr = conn.begin()

    for table in Artwork, User:
        for id, in conn.execute( sql.select([table.id]) ):
            # Insert an empty discussion row and link it back to the record.
            res = conn.execute( sql.insert(Discussion.__table__) )
            discussion_id = res.inserted_primary_key[0]

            conn.execute(sql.update(
                table.__table__,
                table.__table__.c.id == id,
                dict(discussion_id=discussion_id),
            ))

    tr.commit()

    # Every row now has a discussion; enforce NOT NULL from here on.
    Artwork.__table__.c.discussion_id.alter(nullable=False)
    User.__table__.c.discussion_id.alter(nullable=False)
Example #19
0
 def _get_or_create_nevra(self, nevra):
     """Return the Dependency record for *nevra* -- a (name, epoch,
     version, release, arch) tuple -- checking the in-memory cache first,
     then the database, and inserting a new row as a last resort.
     Updates the hit/miss/insert counters accordingly.
     """
     dep = self.nevras.get(nevra)
     if dep is None:
         # Cache miss: look the dependency up in the database.
         dep = self.db.query(*Dependency.inevra)\
             .filter((Dependency.name == nevra[0]) &
                     (Dependency.epoch == nevra[1]) &
                     (Dependency.version == nevra[2]) &
                     (Dependency.release == nevra[3]) &
                     (Dependency.arch == nevra[4]))\
             .first()
         if dep is None:
             # Not in the database either: insert it and build the cached
             # tuple from the RETURNING-ed primary key.
             kwds = dict(name=nevra[0], epoch=nevra[1], version=nevra[2],
                         release=nevra[3], arch=nevra[4])
             dep_id = (
                 self.db.execute(
                     insert(
                         Dependency,
                         [kwds],
                         returning=(Dependency.id,)
                     )
                 )
                 .fetchone().id
             )
             dep = DepTuple(id=dep_id, **kwds)
             self.inserts += 1
         else:
             self.misses += 1
         self._add(dep)
     else:
         self.hits += 1
         self._access(dep)
     return dep
    def __init__(self):
        """Create the ORM engine and pre-build the parameterized queries
        used when processing alarms (all inputs bound via bindparam).
        """
        self._orm_engine = engine_from_config({
            'url': CONF.orm.url
        }, prefix='')

        metadata = MetaData()

        aa = models.create_alarm_action_model(metadata).alias('aa')
        nm = models.create_notification_method_model(metadata).alias('nm')
        nmt_insert = models.create_notification_method_type_model(metadata)
        nmt = nmt_insert.alias('nmt')
        a = models.create_alarm_model(metadata).alias('a')

        # Notification methods attached to an alarm definition in a state.
        self._orm_query = select([nm.c.id, nm.c.type, nm.c.name, nm.c.address, nm.c.period])\
            .select_from(aa.join(nm, aa.c.action_id == nm.c.id))\
            .where(
                and_(aa.c.alarm_definition_id == bindparam('alarm_definition_id'),
                     aa.c.alarm_state == bindparam('alarm_state')))

        # Current state of a single alarm.
        self._orm_get_alarm_state = select([a.c.state]).where(a.c.id == bindparam('alarm_id'))

        # Names of all known notification method types.
        self._orm_nmt_query = select([nmt.c.name])

        # One notification method, looked up by id.
        self._orm_get_notification = select([nm.c.name, nm.c.type, nm.c.address, nm.c.period])\
            .where(nm.c.id == bindparam('notification_id'))

        # Insert statement for registering a new notification method type.
        self._orm_add_notification_type = insert(nmt_insert).values(name=bindparam('b_name'))

        # Connection/session placeholder -- presumably initialized lazily
        # elsewhere in this class.
        self._orm = None
Example #21
0
    def create_user(self, username, password, email, **extra_fields):
        """Create a new account.

        Returns a ``User`` object describing the new account on success,
        or ``False`` when the username is taken or the insert affected no
        row.  Raises BackendError when account creation is disabled.
        """
        if not self.allow_new_users:
            raise BackendError("Creation of new users is disabled")

        values = {
            'username': username,
            # Store only the hash, never the plaintext password.
            'password': sscrypt(password),
            'mail': email,
        }
        # Copy over the optional, whitelisted extra fields only.
        allowed = ('userid', 'accountStatus', 'mailVerified', 'syncNode')
        values.update(
            (field, extra_fields[field])
            for field in allowed if field in extra_fields)

        try:
            res = safe_execute(self._engine, insert(users).values(**values))
        except IntegrityError:
            # The username already exists.
            return False

        if res.rowcount != 1:
            return False

        # Build a lightweight copy of the stored info for the caller.
        userobj = User()
        userobj['username'] = username
        userobj['userid'] = res.lastrowid
        userobj['mail'] = email

        return userobj
Example #22
0
def add_package(session, pkg, pkgdir, file_table):
    """Checksum hook for one source package: write a sha256 sums file
    under the package directory (fs backend) and bulk-insert per-file
    checksum rows (db backend).

    ``file_table``, when given, maps file paths to file ids and saves one
    DB lookup per file.
    """
    global conf
    logging.debug('add-package %s' % pkg)

    sumsfile = sums_path(pkgdir)
    # Write to a temp file first, then rename, so an interrupted run
    # never leaves a half-written sums file behind.
    sumsfile_tmp = sumsfile + '.new'

    def emit_checksum(out, relpath, abspath):
        # Append "<sha256>  <relpath>" for one regular file.
        if os.path.islink(abspath) or not os.path.isfile(abspath):
            # Do not checksum symlinks, if they are not dangling / external we
            # will checksum their target anyhow. Do not check special files
            # either; they shouldn't be there per policy, but they might be
            # (and they are in old releases)
            return
        sha256 = hashutil.sha256sum(abspath)
        out.write('%s  %s\n' % (sha256, relpath))

    if 'hooks.fs' in conf['backends']:
        if not os.path.exists(sumsfile):  # compute checksums only if needed
            with open(sumsfile_tmp, 'w') as out:
                for (relpath, abspath) in \
                        fs_storage.walk_pkg_files(pkgdir, file_table):
                    emit_checksum(out, relpath, abspath)
            os.rename(sumsfile_tmp, sumsfile)

    if 'hooks.db' in conf['backends']:
        db_package = db_storage.lookup_package(session, pkg['package'],
                                               pkg['version'])
        insert_q = sql.insert(Checksum.__table__)
        insert_params = []
        if not session.query(Checksum) \
                      .filter_by(package_id=db_package.id) \
                      .first():
            # ASSUMPTION: if *a* checksum of this package has already
            # been added to the db in the past, then *all* of them have,
            # as additions are part of the same transaction
            for (sha256, relpath) in parse_checksums(sumsfile):
                params = {'package_id': db_package.id, 'sha256': sha256}
                if file_table:
                    try:
                        file_id = file_table[relpath]
                        params['file_id'] = file_id
                    except KeyError:
                        # Unknown path: skip rather than insert a dangling row.
                        continue
                else:
                    file_ = session.query(File) \
                                   .filter_by(package_id=db_package.id,
                                              path=relpath) \
                                   .first()
                    if not file_:
                        continue
                    params['file_id'] = file_.id
                insert_params.append(params)
                # Flush in batches to bound memory usage.
                if len(insert_params) >= BULK_FLUSH_THRESHOLD:
                    session.execute(insert_q, insert_params)
                    session.flush()
                    insert_params = []
            if insert_params:  # source packages shouldn't be empty but...
                session.execute(insert_q, insert_params)
                session.flush()
Example #23
0
 def __init__(self, parent=None):
     """Confirmation dialog shown when the called-off quantity exceeds
     what is still deliverable.

     NOTE(review): this method reads many module-level globals (mhoev,
     mresterend, idx, rpsel, martikelnr, koppelnr, mlevdat, mmmstatus,
     mpostcode, mhuisnr, mtoev, maltern, mstraat, mplaats) and the shared
     connection ``con`` -- presumably set by the calling form; confirm
     before refactoring.
     """
     super(Button, self).__init__(parent)

     msgBox = QMessageBox()
     msgBox.setWindowTitle("Artikelafroep")
     msgBox.setWindowIcon(QIcon('./images/logos/logo.jpg')) 
     msgBox.setStyleSheet("color: black;  background-color: gainsboro; font-size: 16px;height: 20px; width: 50px")
     msgBox.setText('De afroep '+str(mhoev)+' is groter dan nog te leveren '+str(round(mresterend,3)))
     # Two choices: "Doorgaan" (continue) or "Stoppen" (stop).  A truthy
     # return re-runs the call-off; otherwise the quantity is booked below.
     msgBox.addButton(QPushButton('Doorgaan'), QMessageBox.YesRole)
     msgBox.addButton(QPushButton('Stoppen'), QMessageBox.RejectRole)
     retour = msgBox.exec_()
     if retour:
         artAfroep(idx)
     else:
         # Book the call-off on the matching material line and reduce the
         # remaining quantity.
         updmatl = update(materiaallijsten).where(and_(materiaallijsten.c.icalculatie ==\
             rpsel[1], materiaallijsten.c.artikelID == artikelen.c.artikelID,\
             materiaallijsten.c.artikelID == martikelnr)).values(afroep = mhoev,\
             resterend=materiaallijsten.c.resterend - round(mhoev,4))
         con.execute(updmatl)

         # Append a new call-off list row using the next free list id.
         mlijstnr=(con.execute(select([func.max(raaplijst.c.lijstID, type_=Integer)\
                .label('mlijstnr')])).scalar())
         mlijstnr += 1
         insrl = insert(raaplijst).values(lijstID = mlijstnr, artikelID = martikelnr,\
             werkorder = koppelnr, afroep = mhoev, leverdatum = mlevdat,\
             meerwerk = mmmstatus, postcode = mpostcode, huisnummer = mhuisnr,\
             toevoeging = mtoev, alternatief = maltern, straat = mstraat, woonplaats = mplaats)
         con.execute(insrl)
         invoerOK()
 def add_as_notified(self, url_id):
     """Record that the given URL id was notified, stamped with today's
     date (YYYYMMDD)."""
     self.md.clear()
     metadata = MetaData(self.engine)
     notification = Table('notification', metadata, autoload=True)
     today = datetime.now().strftime('%Y%m%d')
     insert(notification).values(url_id=url_id, notified_date=today).execute()
    def set_collection(self, user_id, collection_name, **values):
        """Creates a collection for *user_id* and returns its new id,
        or returns None (implicitly) when it already exists."""
        # XXX values is not used for now because there are no values besides
        # the name
        if self.collection_exists(user_id, collection_name):
            return

        values['userid'] = user_id
        values['name'] = collection_name

        # min_id of 100 when a static id map exists -- presumably low ids
        # are reserved for the standard collections; confirm.
        if self._collections_by_id is not None:
            min_id = 100
        else:
            min_id = 0

        # getting the max collection_id
        # XXX why don't we have an autoinc here ?
        # see https://bugzilla.mozilla.org/show_bug.cgi?id=579096
        next_id = -1
        while next_id < min_id:
            # NOTE(review): if the max existing id + 1 is still below
            # min_id, this loop repeats the same query forever -- confirm
            # whether that case can occur.
            query = self._get_query('COLLECTION_NEXTID', user_id)
            max_ = self._do_query_fetchone(query, user_id=user_id)
            if max_[0] is None:
                next_id = min_id
            else:
                next_id = max_[0] + 1

        # insertion
        values['collectionid'] = next_id
        query = insert(collections).values(**values)
        self._do_query(query, **values)
        return next_id
Example #26
0
def update_suites(status, conf, session, mirror):
    """update stage: sweep and recreate suite mappings

    Deletes existing Suite/SuiteAlias rows (unless dry_run / db backend
    disabled) and rebuilds them from the mirror's suite listings,
    bulk-inserting mappings in batches and finally rewriting the
    sources.txt cache file.
    """
    logging.info('update suites mappings...')

    insert_q = sql.insert(Suite.__table__)
    insert_params = []  # accumulated mapping rows, flushed in bulk batches

    # load suites aliases
    suites_aliases = mirror.ls_suites_with_aliases()
    if not conf['dry_run'] and 'db' in conf['backends']:
        session.query(SuiteAlias).delete()

    for (suite, pkgs) in six.iteritems(mirror.suites):
        if not conf['dry_run'] and 'db' in conf['backends']:
            session.query(Suite).filter_by(suite=suite).delete()
        for pkg_id in pkgs:
            (pkg, version) = pkg_id
            db_package = db_storage.lookup_package(session, pkg, version)
            if not db_package:
                logging.warn('package %s/%s not found in suite %s, skipping'
                             % (pkg, version, suite))
            else:
                logging.debug('add suite mapping: %s/%s -> %s'
                              % (pkg, version, suite))
                params = {'package_id': db_package.id,
                          'suite': suite}
                insert_params.append(params)
                if pkg_id in status.sources:
                    # fill-in incomplete suite information in status
                    status.sources[pkg_id][-1].append(suite)
                else:
                    # defensive measure to make update_suites() more reusable
                    logging.warn('cannot find %s/%s during suite update'
                                 % (pkg, version))
        # flush accumulated mappings once the batch threshold is reached
        if not conf['dry_run'] and 'db' in conf['backends'] \
           and len(insert_params) >= BULK_FLUSH_THRESHOLD:
            session.execute(insert_q, insert_params)
            session.flush()
            insert_params = []

        if not conf['dry_run'] and 'db' in conf['backends']:
            session.query(SuiteInfo).filter_by(name=suite).delete()
            _add_suite(conf, session, suite, aliases=suites_aliases[suite])

    # flush any remaining mappings below the batch threshold
    if not conf['dry_run'] and 'db' in conf['backends'] \
       and insert_params:
        session.execute(insert_q, insert_params)
        session.flush()

    # update sources.txt, now that we know the suite mappings
    # NOTE: string.join is Python 2 only -- this module predates Python 3
    src_list_path = os.path.join(conf['cache_dir'], 'sources.txt')
    with open(src_list_path + '.new', 'w') as src_list:
        for pkg_id, src_entry in six.iteritems(status.sources):
            fields = list(pkg_id)
            fields.extend(src_entry[:-1])  # all except suites
            fields.append(string.join(src_entry[-1], ','))
            src_list.write(string.join(fields, '\t') + '\n')
    # write-then-rename so readers never see a half-written sources.txt
    os.rename(src_list_path + '.new', src_list_path)
Exemple #27
0
 def save_logbook(self, book):
     """Insert or merge *book* into the logbooks table.

     If a row with the book's uuid already exists, the stored book is
     loaded, merged with *book*, and updated in place (its flow details
     are inserted or updated individually); otherwise the book and its
     flow details are inserted as new rows.  Returns the persisted
     logbook object.  DB-API errors are re-raised as StorageFailure.
     """
     try:
         logbooks = self._tables.logbooks
         with self._engine.begin() as conn:
             q = (sql.select([logbooks]).
                  where(logbooks.c.uuid == book.uuid))
             row = conn.execute(q).first()
             if row:
                 # existing book: merge in memory, then update the row
                 e_lb = self._converter.convert_book(row)
                 self._converter.populate_book(conn, e_lb)
                 e_lb.merge(book)
                 conn.execute(sql.update(logbooks)
                              .where(logbooks.c.uuid == e_lb.uuid)
                              .values(e_lb.to_dict()))
                 for fd in book:
                     e_fd = e_lb.find(fd.uuid)
                     if e_fd is None:
                         e_lb.add(fd)
                         self._insert_flow_details(conn, fd, e_lb.uuid)
                     else:
                         self._update_flow_details(conn, fd, e_fd)
                 return e_lb
             else:
                 conn.execute(sql.insert(logbooks, book.to_dict()))
                 for fd in book:
                     self._insert_flow_details(conn, fd, book.uuid)
                 return book
     except sa_exc.DBAPIError:
         exc.raise_with_cause(
             exc.StorageFailure,
             "Failed saving logbook '%s'" % book.uuid)
    def _upsert_generic(self, table, items, annotations):
        """Upsert a batch of items one at a time, trying INSERT then UPDATE.

        This is a tremendously inefficient way to write a batch of items,
        but it's guaranteed to work without special cooperation from the
        database.  For MySQL we use the much improved _upsert_onduplicatekey.

        Returns the number of items newly created (as opposed to updated).
        Asserts that every item in the batch carries the same userid.
        """
        userid = items[0].get("userid")
        num_created = 0
        for item in items:
            assert item.get("userid") == userid
            try:
                # Try to insert the item.
                # If it already exists, this fails with an integrity error.
                query = insert(table).values(**item)
                self.execute(query, item, annotations).close()
                num_created += 1
            except IntegrityError:
                # Update the item.
                # Use the table's primary key fields in the WHERE clause,
                # and put all other fields into the UPDATE clause.
                # Work on a copy so the caller's dict is not mutated.
                item = item.copy()
                query = update(table)
                for key in table.primary_key:
                    try:
                        query = query.where(key == item.pop(key.name))
                    except KeyError:
                        msg = "Item is missing primary key column %r"
                        raise ValueError(msg % (key.name,))
                query = query.values(**item)
                self.execute(query, item, annotations).close()
        return num_created
Exemple #29
0
async def insert_order(conn: SAConn, order: Order):
    """Insert *order* into the Order table.

    :param conn: async SQLAlchemy connection
    :param order: the Order instance to persist
    :return: True on success, False if the insert raised
    """
    try:
        _order = object_as_dict(order)
        await conn.execute(insert(Order).values(**_order))
        return True
    except Exception:
        # Best-effort insert: failure is reported via the return value.
        # (The previously bound exception variable was never used.)
        return False
Exemple #30
0
    def _run_update(self):
        """Link RPM details to file details matched by exact file location.

        Finds (rpm_detail, file_detail) pairs on the same system with the
        same file_location that have no RpmFileDetailLink row yet
        (outer join + NULL filter), bulk-inserts the missing links with
        INSERT ... FROM SELECT, and commits.  Returns the number of
        links created.
        """

        rd: RpmDetail = aliased(RpmDetail)
        rdu: RpmDetail = aliased(RpmDetail)  # NOTE(review): unused alias
        fd: FileDetail = aliased(FileDetail)
        lk: RpmFileDetailLink = aliased(RpmFileDetailLink)

        # pairs with no existing link row
        query = State.get_db_session().query(
            rd.rpm_detail_id, fd.file_detail_id).join(
                fd, (rd.system_id == fd.system_id) &
                (rd.file_location == fd.file_location)).outerjoin(
                    lk, (lk.file_detail_id == fd.file_detail_id) &
                    (lk.rpm_detail_id == rd.rpm_detail_id)).filter(
                        rd.system_id == self.system.system_id,
                        lk.rpm_file_detail_link_id == None)

        # INSERT ... FROM SELECT keeps the pairs out of Python memory
        insert_dml = insert(RpmFileDetailLink).from_select([
            rd.rpm_detail_id,
            fd.file_detail_id,
        ], query)

        result = State.get_db_session().execute(insert_dml)
        log.debug(f"{result.rowcount} files linked.")
        State.get_db_session().flush()
        State.get_db_session().commit()
        self.analyze_database()

        return result.rowcount
Exemple #31
0
def connect_notes(id, origin, target):
    """Create a connection between two notes in workspace *id*.

    The (origin, target) pair is normalised so the smaller note id is
    always stored as the origin.  Returns a JSON payload describing the
    new connection, or an error payload on invalid input.
    """
    try:
        owner = authenticate()

        conn = engine.connect()
        # normalise ordering so each pair is stored exactly one way round
        if origin < target:
            origin_insert = origin
            target_insert = target
        elif origin > target:
            origin_insert = target
            target_insert = origin
        else:
            raise InvalidInformation("Note cannot reference itself.")

        query = sql.insert(Connection.__table__,
                           values={
                               Connection.workspace: id,
                               Connection.origin: origin_insert,
                               Connection.target: target_insert,
                           })
        result = conn.execute(query)
        return jsonify({
            "status": "ok",
            "connection": {
                "id": result.lastrowid,
                "origin": origin_insert,
                "target": target_insert,
            }
        })
    except (MissingInformation, InvalidInformation) as e:
        # InvalidInformation (self-reference) was previously uncaught and
        # escaped as a server error; both error types are reported
        # uniformly now (both carry .message, as used elsewhere in file).
        return jsonify({"status": "error", "message": e.message})
Exemple #32
0
async def test_pre_register(http_server_client, default_db):
    """Test registering a pre-registered user (email exists, no password).

    Seeds a user row, POSTs the registration form, expects a 302
    redirect carrying a session cookie, then verifies the cookie grants
    access to "/".
    """
    with ConnectionMgr.session(default_db) as session:
        session.execute(insert(user, {"email": "*****@*****.**"}))
        session.commit()

    body = urllib.parse.urlencode({
        "email": "*****@*****.**",
        "password": "******",
        "submit": "register"
    })
    headers = {
        "Content-type": "application/x-www-form-urlencoded",
        "Accept": "text/plain",
    }
    cookie = None  # set only when the expected redirect is raised
    try:
        await http_server_client.fetch(
            "/login",
            headers=headers,
            method="POST",
            body=body,
            follow_redirects=False,
        )
    except HTTPClientError as ex:
        print(ex.response.headers)
        cookie = ex.response.headers["Set-Cookie"]
        assert ex.code == 302
    # previously `cookie` was unbound (NameError) when no redirect was
    # raised; fail with a clear assertion message instead
    assert cookie is not None, "expected /login to redirect with a session cookie"

    resp = await http_server_client.fetch("/", headers={"Cookie": cookie})
    assert resp.code == 200
Exemple #33
0
 def create_user(self, user_name, password, email):
     """Creates a user. Returns True on success."""
     hashed = ssha256(password)
     stmt = insert(users).values(
         username=user_name,
         email=email,
         password_hash=hashed,
         status=1,
     )
     result = safe_execute(self._engine, stmt)
     return result.rowcount == 1
Exemple #34
0
 def _get_or_create_nevra(self, nevra):
     """Return the Dependency row for *nevra*, using a local cache.

     nevra is a (name, epoch, version, release, arch) tuple.  Lookup
     order: in-memory cache, then the database, then INSERT of a new
     row.  The hits/misses/inserts counters track cache statistics.
     """
     dep = self.nevras.get(nevra)
     if dep is None:
         # not cached: try the database first
         dep = self.db.query(*Dependency.inevra)\
             .filter((Dependency.name == nevra[0]) &
                     (Dependency.epoch == nevra[1]) &
                     (Dependency.version == nevra[2]) &
                     (Dependency.release == nevra[3]) &
                     (Dependency.arch == nevra[4]))\
             .first()
         if dep is None:
             # not in the database either: insert and fetch the new id
             kwds = dict(name=nevra[0],
                         epoch=nevra[1],
                         version=nevra[2],
                         release=nevra[3],
                         arch=nevra[4])
             dep_id = (self.db.execute(
                 insert(Dependency, [kwds],
                        returning=(Dependency.id, ))).fetchone().id)
             dep = DepTuple(id=dep_id, **kwds)
             self.inserts += 1
         else:
             self.misses += 1
         self._add(dep)
     else:
         self.hits += 1
         self._access(dep)
     return dep
Exemple #35
0
    def _run_link_match_update(self):
        """Link RPM details to file details reached via resolved symlinks.

        Joins RpmDetail to FileDetail through the ResolvedSymlinks table
        (rpm file_location -> resolved_location), restricted to pairs
        with no existing RpmFileDetailLink row, then bulk-inserts the
        missing links and commits.  Returns the number of links created.
        """

        rd: RpmDetail = aliased(RpmDetail)
        fd: FileDetail = aliased(FileDetail)
        lk: RpmFileDetailLink = aliased(RpmFileDetailLink)

        # pairs reachable through a resolved symlink but not yet linked
        query = State.get_db_session().query(
            rd.rpm_detail_id,
            fd.file_detail_id,
        ).join(ResolvedSymlinks, (ResolvedSymlinks.system_id == rd.system_id) &
               (ResolvedSymlinks.file_location == rd.file_location)).join(
                   fd, (ResolvedSymlinks.system_id == fd.system_id) &
                   (fd.file_location
                    == ResolvedSymlinks.resolved_location)).outerjoin(
                        lk, (lk.file_detail_id == fd.file_detail_id) &
                        (lk.rpm_detail_id == rd.rpm_detail_id)).filter(
                            rd.system_id == self.system_id,
                            lk.rpm_file_detail_link_id == None)

        # INSERT ... FROM SELECT avoids materialising the pairs in Python
        insert_dml = insert(RpmFileDetailLink).from_select([
            rd.rpm_detail_id,
            fd.file_detail_id,
        ], query)

        result = State.get_db_session().execute(insert_dml)
        State.get_db_session().flush()
        State.get_db_session().commit()
        self.analyze_database()
        return result.rowcount
Exemple #36
0
    async def stat_post(self, post_id):
        """Recompute and persist like-count stats for *post_id*.

        Counts the post's likes, upserts the post_stat row (insert when
        missing, update otherwise), and returns the refreshed stat info.
        """
        data = {
            'like_count': 0,
        }

        # count likes for this post
        result = await self.post_like_repo.execute(
            sasql.select([sasql.func.count()
                          ]).select_from(self.post_like_repo.table).where(
                              self.post_like_repo.table.c.post_id == post_id))
        data['like_count'] = await result.scalar()

        # upsert: insert when no stat row exists yet, otherwise update
        result = await self.post_stat_repo.execute(
            self.post_stat_repo.table.select().where(
                self.post_stat_repo.table.c.post_id == post_id))
        row = await result.first()
        if row is None:
            await self.post_stat_repo.execute(
                sasql.insert(self.post_stat_repo.table).values(post_id=post_id,
                                                               **data))
        else:
            await self.post_stat_repo.execute(
                sasql.update(self.post_stat_repo.table).where(
                    self.post_stat_repo.table.c.post_id == post_id).values(
                        **data))

        return await self.post_stat_info_by_post_id(post_id)
Exemple #37
0
def filled_engine():
    """
    filled_engine manages database records for each test.

    Fixture used when testing against the database.  Records are saved
    before each test case runs, and all records created by the fixture
    are deleted once the test finishes.
    :return:
    """
    # Register the preset data in the database before every test.
    engine = new_engine()
    jst = timezone(timedelta(hours=9), 'JST')
    # Use a fixed timestamp: fractional seconds would make the WHERE
    # clause matching in the teardown very painful.
    now = datetime(2019, 12, 1, 11, 30, tzinfo=jst)

    # prepare users
    til_users = til_users_tables()
    users = preset_til_users(now)
    engine.execute(insert(til_users), users)

    # Run the test.
    yield engine

    # After every test, delete the data created in the prepare step.
    stmt = delete(til_users).where(column("created").__eq__(now))
    engine.execute(stmt)
 def save_logbook(self, book):
     """Insert or merge *book* into the logbooks table.

     Existing books (matched by uuid) are merged and updated along with
     their flow details; new books and their flow details are inserted
     wholesale.  Returns the persisted logbook object.  DB-API failures
     are re-raised as StorageFailure.
     """
     try:
         logbooks = self._tables.logbooks
         with self._engine.begin() as conn:
             q = (sql.select([logbooks
                              ]).where(logbooks.c.uuid == book.uuid))
             row = conn.execute(q).first()
             if row:
                 # existing book: merge in memory, then update the row
                 e_lb = self._converter.convert_book(row)
                 self._converter.populate_book(conn, e_lb)
                 e_lb.merge(book)
                 conn.execute(
                     sql.update(logbooks).where(
                         logbooks.c.uuid == e_lb.uuid).values(
                             e_lb.to_dict()))
                 for fd in book:
                     e_fd = e_lb.find(fd.uuid)
                     if e_fd is None:
                         e_lb.add(fd)
                         self._insert_flow_details(conn, fd, e_lb.uuid)
                     else:
                         self._update_flow_details(conn, fd, e_fd)
                 return e_lb
             else:
                 conn.execute(sql.insert(logbooks, book.to_dict()))
                 for fd in book:
                     self._insert_flow_details(conn, fd, book.uuid)
                 return book
     except sa_exc.DBAPIError:
         exc.raise_with_cause(exc.StorageFailure,
                              "Failed saving logbook '%s'" % book.uuid)
Exemple #39
0
async def insert_tx(conn: SAConn, tx: Tx) -> bool:
    """Insert *tx* into the Tx table.

    :param conn: async SQLAlchemy connection
    :param tx: the Tx instance to persist
    :return: True on success, False if the insert raised
    """
    try:
        _tx = object_as_dict(tx)
        await conn.execute(insert(Tx).values(**_tx))
        return True
    except Exception:
        # Best-effort insert: failure is reported via the return value.
        # (The previously bound exception variable was never used.)
        return False
Exemple #40
0
def register():
    """Handle user registration.

    Validates the form, checks the password confirmation, enforces
    username/email uniqueness, inserts the new user, and redirects to
    the login page on success.
    """
    form = CadForm()
    if form.validate_on_submit():
        if form.password.data == form.re_password.data:
            # any existing row with the same username or email makes the
            # registration non-unique
            s = select([User]).where(or_(User.username == form.username.data,
                                         User.email == form.email.data)).limit(1)
            result = conn.execute(s)
            is_unique = result.first() is None

            if not is_unique:
                # fixed ungrammatical message ("This is user is already...")
                flash("This user is already registered.")
            else:
                # NOTE(review): password is stored in plaintext -- it
                # should be hashed before insertion.
                stmt = insert(User).values(username=form.username.data,
                                           email=form.email.data,
                                           password=form.password.data,
                                           name=form.username.data)
                conn.execute(stmt)
                flash("Successfully registered! Please log in, now.")
                return redirect(url_for('login'))
        else:
            # fixed subject/verb agreement ("The password don't match.")
            flash("The passwords don't match.")

    return render_template('register.html', form=form)
Exemple #41
0
    def set_collection(self, user_id, collection_name, **values):
        """Create a collection for *user_id*, unless it already exists.

        Returns the new collection id, or None when the collection was
        already present.
        """
        if self.collection_exists(user_id, collection_name):
            return

        values['userid'] = user_id
        values['name'] = collection_name

        # when standard collections are enabled, new ids start above them
        if self.standard_collections:
            min_id = max(_STANDARD_COLLECTIONS.keys()) + 1
        else:
            min_id = 0

        # getting the max collection_id
        # XXX why don't we have an autoinc here ?
        # see https://bugzilla.mozilla.org/show_bug.cgi?id=579096
        next_id = -1
        while next_id < min_id:
            query = self._get_query('COLLECTION_NEXTID', user_id)
            row = self._do_query_fetchone(query, user_id=user_id)
            next_id = min_id if row[0] is None else row[0] + 1

        # insertion
        values['collectionid'] = next_id
        query = insert(collections).values(**values)
        self._do_query(query, **values)
        return next_id
Exemple #42
0
def subscribe_to_level():
    """Subscribe the user identified by the posted token to a level.

    Expects `token` and `id` form fields; returns a status payload, or
    an error payload when a field is missing or the token is invalid.
    """
    try:
        token = request.form["token"]
        level_id = request.form["id"]

        if token is None:
            raise MissingInformation("token")
        if level_id is None:
            raise MissingInformation("id")
    except MissingInformation as e:
        return make_error(e.message)

    try:
        user_id = get_user_id_from_token(token)
    except InvalidInformation as e:
        return make_error(e.message)

    print(level_id, user_id)

    conn = engine.connect()
    query = sql.insert(
        Subscription,
        values={Subscription.level_id: level_id,
                Subscription.user_id: user_id}
    )
    # result is not inspected: the insert either succeeds or raises
    # (removed an unused result binding and dead commented-out code)
    conn.execute(query)

    return make_status("success", "Subscribed to level")
Exemple #43
0
 def _append_user(self):
     """Add new recommend user."""
     self.md.clear()
     user_table = Table('user', self.md, autoload=True)
     insert(user_table).values(name=self.name).execute()
     # TODO: Change logic.
     new_id = self._load_user_no()
     logging.info('Add new user(id={}, name={}).'.format(new_id, self.name))
     return new_id
 def _register(self, url_id):
     """Register bookmark transaction."""
     self.md.clear()
     metadata = MetaData(self.engine)
     bookmark = Table('bookmark', metadata, autoload=True)
     today = int(date.today().strftime("%Y%m%d"))
     stmt = insert(bookmark).values(url_id=url_id,
                                    user_id=self.user.id,
                                    registered_date=today)
     stmt.execute()
Exemple #45
0
 async def test_get_engine(self, engines, binds):
     """Verify Session.get_engine routes each query to its bound engine.

     Builds statements against tables bound to two different databases
     and checks each resolves to its own engine; a non-query argument
     must raise OrmError.
     """
     test_table1 = self.test_models['db1'].test_table1
     test_table2 = self.test_models['db2'].test_table2
     async with Session(engines, binds) as session:
         i1 = sql.insert(test_table1)
         i2 = sql.insert(test_table2)
         u1 = sql.update(test_table1)
         u2 = sql.update(test_table2)
         d1 = sql.delete(test_table1)
         d2 = sql.delete(test_table2)
         # NOTE(review): s1/s2 duplicate i1/i2 -- the `s` names suggest
         # sql.select was intended here; confirm and fix upstream.
         s1 = sql.insert(test_table1)
         s2 = sql.insert(test_table2)
         for q1, q2 in [(i1, i2), (u1, u2), (d1, d2), (s1, s2)]:
             engine1 = session.get_engine(q1)
             engine2 = session.get_engine(q2)
             self.assertEqual(engine1, engines['db1'])
             self.assertEqual(engine2, engines['db2'])
         with self.assertRaises(exc.OrmError):
             session.get_engine('error query')
Exemple #46
0
 def thr_execute(self, txn, **data):
     """Compile an INSERT for *data* and execute it on the thread's txn.

     The statement is compiled against the pysqlite dialect with named
     parameters so it can run through the raw DB-API cursor.
     (Python 2 era code: note the `unicode` builtin.)
     """
     stmt = sql.insert(self.table).values(**data)
     compiled = stmt.compile(dialect=pysqlite.dialect(paramstyle="named"))
     raw_sql = unicode(compiled)
     params = compiled.params
     self.log.debug(' sql: %s , params: %s', repr(raw_sql), params)
     # the former bare `try/except: raise` wrapper was a no-op and has
     # been removed; DB-API errors propagate unchanged
     txn.execute(raw_sql, params)
     return None
 def set_user_info(self, uid=None, pemail="", sname="", fname="", avatar="", nickname="", poco_server="", **kw):
     """Insert a user_info row; returns True when exactly one row was added."""
     if uid is None:
         uid = self.uid
     stmt = insert(user_info).values(uid=uid,
                                     pemail=pemail,
                                     sname=sname,
                                     fname=fname,
                                     avatar=avatar,
                                     nickname=nickname,
                                     poco_server=poco_server)
     result = self._db.execute(stmt)
     if result.rowcount == 1:
         return True
     logger.debug("Unable to add user info ")
     return False
Exemple #48
0
 def set_group_content(self, group, contents, append=False):
     """Set (or extend) the package names belonging to *group*.

     With append=False the existing relations are replaced wholesale;
     with append=True only names not already present are added.
     """
     existing_names = set(self.db.query(PackageGroupRelation.package_name)\
                          .filter_by(group_id=group.id).all_flat())
     rels = []
     for name in set(contents):
         if not append or name not in existing_names:
             rels.append(dict(group_id=group.id, package_name=name))
     if not append:
         # full replace: drop all current relations before re-inserting
         self.db.query(PackageGroupRelation).filter_by(group_id=group.id).delete()
     if rels:
         self.db.execute(insert(PackageGroupRelation, rels))
Exemple #49
0
 def _append(self):
     """Add new feed url into database."""
     logging.info('SAVE MY FEED')
     logging.info(self.url)
     self.md.clear()
     metadata = MetaData(self.engine)
     feed = Table('feed', metadata, autoload=True)
     insert(feed).values(url=self.url, title=self.title).execute()
     logging.info('----------------------')
Exemple #50
0
def create_vocabulary(obj):
    """
    Create a vocabulary

    :param dict obj: values for the vocabulary fields
    :return: the id of the inserted vocabulary row
    """
    stmt = insert(vocabulary_table).values(**obj).returning(
        vocabulary_table.c.id)
    row = stmt.execute().first()
    return row[0]
def add_package(session, pkg, pkgdir, file_table):
    """Record checksum information for *pkg* (unpacked under *pkgdir*).

    fs backend: write a sums file of sha256 checksums next to the
    package, if not already present.
    db backend: bulk-insert Checksum rows in batches, resolving each
    file's id via the in-memory *file_table* mapping when given, or by
    querying the File table otherwise.
    """
    global conf
    logging.debug("add-package %s" % pkg)

    sumsfile = sums_path(pkgdir)
    sumsfile_tmp = sumsfile + ".new"

    def emit_checksum(out, relpath, abspath):
        # Write one "sha256  relpath" line for a regular file.
        if os.path.islink(abspath) or not os.path.isfile(abspath):
            # Do not checksum symlinks, if they are not dangling / external we
            # will checksum their target anyhow. Do not check special files
            # either; they shouldn't be there per policy, but they might be
            # (and they are in old releases)
            return
        sha256 = hashutil.sha256sum(abspath)
        out.write("%s  %s\n" % (sha256, relpath))

    if "hooks.fs" in conf["backends"]:
        if not os.path.exists(sumsfile):  # compute checksums only if needed
            with open(sumsfile_tmp, "w") as out:
                for (relpath, abspath) in fs_storage.walk_pkg_files(pkgdir, file_table):
                    emit_checksum(out, relpath, abspath)
            os.rename(sumsfile_tmp, sumsfile)

    if "hooks.db" in conf["backends"]:
        db_package = db_storage.lookup_package(session, pkg["package"], pkg["version"])
        insert_q = sql.insert(Checksum.__table__)
        insert_params = []
        if not session.query(Checksum).filter_by(package_id=db_package.id).first():
            # ASSUMPTION: if *a* checksum of this package has already
            # been added to the db in the past, then *all* of them have,
            # as additions are part of the same transaction
            for (sha256, relpath) in parse_checksums(sumsfile):
                params = {"package_id": db_package.id, "sha256": sha256}
                if file_table:
                    try:
                        file_id = file_table[relpath]
                        params["file_id"] = file_id
                    except KeyError:
                        # path not known to the mapping: skip this checksum
                        continue
                else:
                    file_ = session.query(File).filter_by(package_id=db_package.id, path=relpath).first()
                    if not file_:
                        continue
                    params["file_id"] = file_.id
                insert_params.append(params)
                # flush in batches to bound memory usage
                if len(insert_params) >= BULK_FLUSH_THRESHOLD:
                    session.execute(insert_q, insert_params)
                    session.flush()
                    insert_params = []
            if insert_params:  # source packages shouldn't be empty but...
                session.execute(insert_q, insert_params)
                session.flush()
Exemple #52
0
def api_user_create():
    """Create a new user from posted form fields (admin only).

    Requires an auth token whose owner has the "admin" rank; generates a
    random password when none is supplied.  Returns a JSON status
    payload, including the new user id on success.
    """
    try:
        auth_token = request.form["auth_token"]

        user_first_name = request.form["user_first_name"]
        user_last_name = request.form["user_last_name"]
        user_other_names = request.form["user_other_names"] or None

        user_email = request.form["user_email"] or None
        # NOTE(review): this reads the "user_first_name" form key again --
        # the username likely should come from a "user_username" field;
        # confirm against the form before changing.
        user_username = request.form["user_first_name"]
        user_password = request.form["user_password"] or gen_password(8)

        user_rank = request.form["rank"]

        if "" in (user_first_name, user_last_name, user_username, user_password, user_rank):
            raise KeyError

    except KeyError as e:
        raise APIMissingField(e.args[0])

    caller = get_user_from_token(auth_token)

    if caller.rank != "admin":
        return make_response(jsonify({
            "status": "failed",
            "message": "You do not have a high enough rank to create users."
        }))

    conn = engine.connect()
    query = sql.insert(User, {
        User.first_name: user_first_name,
        User.last_name: user_last_name,
        User.other_names: user_other_names,

        User.email: user_email,
        User.username: user_username,
        User.pass_hash: crypt_hash(user_password),

        User.rank: user_rank
    })
    res = conn.execute(query)

    if res.inserted_primary_key:
        return make_response(jsonify({
            "status": "success",
            "message": "User created",
            "user_id": res.inserted_primary_key
        }))
    else:
        return make_response(jsonify({
            "status": "failed",
            "message": "Unknown error"
        }))
Exemple #53
0
    def set_user(self, user_id, **values):
        """Set information for a user; *values* holds the fields to set.

        The user row is created when it does not exist yet, otherwise it
        is updated in place.
        """
        values['id'] = user_id
        if self.user_exists(user_id):
            query = update(users).where(users.c.id == user_id).values(**values)
        else:
            query = insert(users).values(**values)
        self._do_query(query)
Exemple #54
0
def add_external_signature_request(session, target_suite, suite, binary):
    """Queue an external signature request for *binary* in *target_suite*.

    Looks up the binary association id for (suite, binary) and inserts a
    request row unless one already exists -- an upsert emulated with a
    SELECT-then-INSERT (see the TODO below for the native form).
    """
    tbl_ba = DBConn().tbl_bin_associations
    tbl_esr = DBConn().tbl_external_signature_requests

    # TODO [sqlalchemy >= 1.1]: use `ON CONFLICT DO NOTHING`
    #select = sql.select([tbl_ba.c.id, target_suite.suite_id]).where((tbl_ba.c.suite == suite.suite_id) & (tbl_ba.c.bin == binary.binary_id))
    #insert = pgsql.insert(tbl_esr).from_select([tbl_esr.c.association_id, tbl_esr.c.suite_id], select).on_conflict_do_nothing()
    #session.execute(insert)

    ba_id = session.execute(sql.select([tbl_ba.c.id]).where((tbl_ba.c.suite == suite.suite_id) & (tbl_ba.c.bin == binary.binary_id))).scalar()
    exists = session.execute(sql.select([tbl_esr]).where(tbl_esr.c.association_id == ba_id).where(tbl_esr.c.suite_id == target_suite.suite_id)).first()
    if exists is None:
        insert = sql.insert(tbl_esr).values(association_id=ba_id, suite_id=target_suite.suite_id)
        session.execute(insert)
Exemple #55
0
    def _set_reset_code(self, user_id):
        """Replace any existing reset code for *user_id* with a fresh one.

        Returns the new code, or None when the insert did not take.
        """
        code = self.rc._generate_reset_code()
        expiration = datetime.datetime.now() + datetime.timedelta(hours=6)
        self._engine.execute(
            delete(reset_codes).where(reset_codes.c.username == user_id))

        stmt = insert(reset_codes).values(reset=code,
                                          expiration=expiration,
                                          username=user_id)
        res = safe_execute(self._engine, stmt)
        if res.rowcount != 1:
            logger.debug("Unable to add a new reset code in the"
                         " reset_code table")
            return None  # XXX see if appropriate
        return code
Exemple #56
0
def post_level_score():
    """Record a score for the authenticated user on a level.

    Reads `level_id` and `score` from the posted form and inserts a
    Score row; returns the literal string "true".
    """
    user_id = get_user_id_from_token()
    level_id = int(request.form["level_id"])
    score = int(request.form["score"])

    conn = engine.connect()
    # legacy `values=` dict form of insert(), keyed by column objects
    query = sql.insert(Score.__table__,
               values={
                   Score.user_id: user_id,
                   Score.level_id: level_id,
                   Score.score: score
               })

    conn.execute(query)

    return "true"
Exemple #57
0
def add_suite(conf, session, suite, archive):
    """Add *suite* to the archive as a sticky suite.

    Extract stage: create the suite record and add its packages, setting
    the sticky bit on packages that already exist.  Suites stage: insert
    the missing package->suite mappings in one bulk statement.  Finally,
    record statistics for the suite.
    """
    logging.info('add sticky suite %s to the archive...' % suite)

    db_suite = db_storage.lookup_db_suite(session, suite, sticky=True)
    if not db_suite:
        if updater.STAGE_EXTRACT in conf['stages']:
            updater._add_suite(conf, session, suite, sticky=True)
    else:
        logging.warn('sticky suite %s already exist, looking for new packages'
                     % suite)

    if updater.STAGE_EXTRACT in conf['stages']:
        for pkg in archive.ls(suite):
            db_package = db_storage.lookup_package(session, pkg['package'],
                                                   pkg['version'])
            if db_package:  # avoid GC upon removal from a non-sticky suite
                if not db_package.sticky and not conf['dry_run']:
                    logging.debug('setting sticky bit on %s' % pkg)
                    db_package.sticky = True
            else:
                # new package: add it, in its own transaction unless the
                # caller requested a single enclosing transaction
                if not conf['single_transaction']:
                    with session.begin():
                        updater._add_package(pkg, conf, session, sticky=True)
                else:
                    updater._add_package(pkg, conf, session, sticky=True)
        session.flush()  # to fill Package.id-s

    if updater.STAGE_SUITES in conf['stages']:
        suitemap_q = sql.insert(Suite.__table__)
        suitemaps = []
        for (pkg, version) in archive.suites[suite]:
            db_package = db_storage.lookup_package(session, pkg, version)
            if not db_package:
                logging.warn('package %s/%s not found in sticky suite'
                             ' %s, skipping'
                             % (pkg, version, suite))
                continue
            if not db_storage.lookup_suitemapping(session, db_package, suite):
                suitemaps.append({'package_id': db_package.id,
                                  'suite': suite})
        if suitemaps and not conf['dry_run']:
            session.execute(suitemap_q, suitemaps)

    _add_stats_for(conf, session, suite)

    logging.info('sticky suite %s added to the archive.' % suite)
Exemple #58
0
    def _set_reset_code(self, user_id):
        """Generate and store a new reset code for *user_id*.

        Any previous code is deleted first.  Returns the code, or None
        when the insert failed.
        """
        code, expiration = generate_reset_code()
        self._engine.execute(
            delete(reset_codes).where(reset_codes.c.username == user_id))

        stmt = insert(reset_codes).values(reset=code,
                                          expiration=expiration,
                                          username=user_id)
        res = safe_execute(self._engine, stmt)
        if res.rowcount != 1:
            logger.debug('Unable to add a new reset code in the'
                         ' reset_code table')
            return None  # XXX see if appropriate
        return code