Example #1
async def register(request):
    try:
        first_name = request.form.get("first_name").strip()
        last_name = request.form.get("last_name").strip()
        email = request.form.get("email").strip()
        password = request.form.get("password")
        confirm_password = request.form.get("confirm_password")
        if not first_name or not last_name: raise Exception("name_missing")
        if not email or not password: raise Exception("credential_missing")
        if password != confirm_password: raise Exception("password_matching")
    except Exception as e:
        logger.info(e)
        return redirect(
            request.app.url_for("register_page") + "?failure={}".format(e))

    with db_session():
        if User.exists(email=email):
            logger.info("User with email {} exists.".format(email))
            return redirect(
                request.app.url_for("register_page") + "?failure=exists")

        password_hash = User.hash_password(password)
        user = User(email=email,
                    password_hash=password_hash,
                    first_name=first_name,
                    last_name=last_name,
                    date_registered=datetime.now())

    with db_session():
        user = User.get(email=email)
        temp_token = url_safe_serializer.dumps(str(user.id))
    return redirect(
        request.app.url_for("auth.send_verification_email", token=temp_token))
Example #2
async def reset_password(request, token):
    global url_safe_serializer
    try:
        user_id = url_safe_serializer.loads(token, max_age=60 * 60 * 24)
        with db_session():
            user = User.get(id=user_id, active_token=token)
        if not user: raise Exception()
    except SignatureExpired as e:
        return html(GenericResponse.token_expired(request.app))
    except Exception as e:
        return html(GenericResponse.token_invalid(request.app))

    try:
        password = request.form.get("password")
        confirm_password = request.form.get("confirm_password")
        if not confirm_password or not password:
            raise Exception("credential_missing")
        if password != confirm_password: raise Exception("password_matching")
    except Exception as e:
        logger.info(e)
        return redirect(
            request.app.url_for("reset_password_page") +
            "?failure={}".format(e))

    password_hash = User.hash_password(password)
    with db_session():
        User[user_id].set(password_hash=password_hash, active_token="")

    response_html = GenericResponse.password_reset_success(request.app)
    return html(response_html)
Example #3
    def do_recreate_indexes(self, mds: MetadataStore, base_duration):
        index_total = None
        index_num = 0
        t0 = t1 = now()

        # SQLite callback handler to update progress bar during index creation
        def index_callback_handler():
            try:
                t2 = now()
                index_percentage = calc_progress(t2 - t1, base_duration / 8.0)
                total_percentage = (index_num * 100.0 + index_percentage) / index_total
                self.notification_callback(f"recreating indexes\n"
                                       f"{total_percentage:.2f}% done")
            except Exception as e:
                self._logger.error(f"Error in SQLite callback handler: {type(e).__name__}:{str(e)}")
                self.shutting_down = True

        # Recreate table indexes
        with db_session(ddl=True):
            connection = mds._db.get_connection()
            try:
                db_objects = mds.get_objects_to_create()
                index_total = len(db_objects)
                for i, obj in enumerate(db_objects):
                    index_num = i
                    t1 = now()
                    connection.set_progress_handler(index_callback_handler, 5000)
                    obj.create(mds._db.schema.provider, connection)
                    duration = now() - t1
                    self._logger.info(f"Upgrade: created {obj.name} in {duration:.2f} seconds")
            finally:
                connection.set_progress_handler(None, 0)

        duration = now() - t0
        self._logger.info(f'Recreated all indexes in {duration:.2f} seconds')
        t1 = now()

        # SQLite callback handler to update progress bar during FTS index creation
        def fts_callback_handler():
            try:
                t2 = now()
                self.notification_callback("adding full text search index...\n"
                                       f"{calc_progress(t2 - t1, base_duration):.2f}% done")
            except Exception as e:
                self._logger.error(f"Error in SQLite callback handler: {type(e).__name__}:{str(e)}")
                self.shutting_down = True

        # Create FTS index
        with db_session(ddl=True):
            mds.create_fts_triggers()
            connection = mds._db.get_connection()
            connection.set_progress_handler(fts_callback_handler, 5000)
            try:
                t = now()
                mds.fill_fts_index()
                duration = now() - t
                self._logger.info(f'Upgrade: fill FTS in {duration:.2f} seconds')
            finally:
                connection.set_progress_handler(None, 0)
        mds.shutdown()
Example #4
def test_endpoint_with_cookie():
    with db_session():
        cart_id = str(uuid.uuid4())
        cart = Cart(id=cart_id)
        cart_item = Item(cart=cart,
                         external_id=str(uuid.uuid4()),
                         value=1,
                         name="Initial")

    payload = {
        "external_id": cart_item.external_id,
        "value": 100,
        "name": "Changed"
    }
    request, response = app.test_client.post("/item",
                                             data=json.dumps(payload),
                                             cookies={"cart_id": cart_id})

    assert response.status_code == 204

    with db_session():
        cart = Cart.get(id=cart_id)
        assert len(cart.items) == 1
        db_item = cart.items.copy().pop()
        assert db_item.external_id == cart_item.external_id
Example #5
def post_experiments(body, response, user: hug.directives.user):
    with orm.db_session():
        owner = User[user['id']]
        if body is None or 'name' not in body or 'variable_names' not in body:
            raise falcon.HTTPBadRequest()
        expr = Experiment(owner=owner,
                          name=body['name'],
                          variable_names=body['variable_names'])
    with orm.db_session():
        return expr.summary()
Example #6
async def remove(request, user, watchlist, movie_id):
    logger.debug(movie_id)
    with db_session():
        watchlist = Watchlist.get(id=watchlist.id)
        MovieItem.select(lambda m: m.imdb_id == movie_id
                         and m.watchlist.id == watchlist.id).delete()
    return response.empty()
Example #7
def create_project(file_path, overwrite=False):
    '''
    Checks whether the file is a valid HVAC spreadsheet file
    and, if so, saves the Project data into the database.
    '''
    with orm.db_session():
        project = models.Project.get(path=file_path)

        if project is None:
            project = models.Project(path=file_path)
        elif not overwrite:
            return None

        version = xl_functions.check_version(file_path)
        project.spreadsheet_version = version
        project.debug = None
        project.number, project.name = os_functions.get_proj_no_and_name(
            file_path)
        project.date_modified = os_functions.get_date_modified(file_path)

        if version is None:
            return None

        version_map = 'ver' + version.replace('.', '')
        if version_map in dir(mappings):
            project.xl_mapping_present = True
Example #8
def cfg_10_identical_statements(mockcfg):
    """Return a cfg dict containing a db prepopulated with 10 nearly identical statements
    The only difference is id.
    """
    Statement = mockcfg["db"].entities["Statement"]
    with orm.db_session():
        for i in range(1, 11):
            data = {
                "@id": "S{:03d}".format(11 - i),
                "createdBy": "Creator 1",
                "createdWhen": datetime.datetime(2015, 1, 1).isoformat(),
                "modifiedBy": "Modifier 1",
                "modifiedWhen": datetime.datetime(2015, 1, 2).isoformat(),
                "uris": [
                    "https://example.com/statements/1",
                    "https://example.com/statements/2",
                ],
            }
            Statement.create_from_ipif(data)
    yield mockcfg
Example #9
    def delete_image(self):
        # determine the image's row position in the table
        image_table_position = list(IMAGE_PATH_DICT.keys()).index(
            self.active_image_id)

        message = QMessageBox().question(
            self,
            "Confirm deletion",
            "Delete duplicate media file?",
            QMessageBox.Yes | QMessageBox.No,
        )
        if message == QMessageBox.Yes:
            self.imageListTable.removeRow(image_table_position)
            image_id = IMAGE_PATH_DICT[self.active_image_id]["id"]
            # run custom delete
            with db_session():
                Image[image_id].custom_delete()

            # delete image key from dict
            del IMAGE_PATH_DICT[self.active_image_id]

            self.hide_active_image()
            QMessageBox().information(self, "File deletion",
                                      "File successfully deleted", QMessageBox.Ok,
                                      QMessageBox.Ok)
        elif message == QMessageBox.No:
            pass
Example #10
    def test1(self):
        db_args, db_kwargs = self.db_params

        db = orm.Database(*db_args, **db_kwargs)

        class Test(db.Entity):
            field = orm.Required(str)

        class Parent(db.Entity):
            _discriminator_ = 1
            type_field = orm.Discriminator(int)
            field = orm.Required(str)

            orm.composite_index(type_field, field)

        class Child(Parent):
            _discriminator_ = 2
            another_field = orm.Required(str)

            orm.composite_key("type_field", another_field)

        command.migrate(db, "make -v")
        command.migrate(db, "apply -v")

        with orm.db_session():
            Parent(field="abcdef")
            Child(field="qwerty", another_field="1234")
Example #11
async def login(request):
    email = request.form.get("email")
    password = request.form.get("password")
    if not email or not password:
        logger.info("Missing fields")
        return redirect(request.app.url_for("login_page") + "?failure=invalid")

    with db_session():
        user = User.get(email=email)

    if not (user and user.check_password(password)):
        logger.info("User not found/incorrect password ({})".format(email))
        return redirect(
            request.app.url_for("login_page") + "?failure=password")

    if not user.is_verified:
        temp_token = url_safe_serializer.dumps(str(user.id))
        return redirect(
            request.app.url_for("auth.send_verification_email",
                                token=temp_token))

    response = redirect(request.app.url_for("landing_page"))
    serializer = TimedJSONWebSignatureSerializer(config.JWT_SECRET,
                                                 expires_in=60 * 60 * 24 * 7)
    access_token = serializer.dumps({"user_id": str(user.id)})
    response.cookies["token"] = access_token.decode()
    response.cookies["token"]["expires"] = datetime.now() + timedelta(days=1)

    return response
Example #12
    def on_message(self, peer, raw: RawTagOperationMessage):
        operation, _ = self.serializer.unpack_serializable(
            TagOperation, raw.operation)
        signature, _ = self.serializer.unpack_serializable(
            TagOperationSignature, raw.signature)
        self.logger.debug(f'<- message received: {operation}')
        try:
            remote_key = self.crypto.key_from_public_bin(
                operation.creator_public_key)

            self.requests.validate_peer(peer)
            self.verify_signature(packed_message=raw.operation,
                                  key=remote_key,
                                  signature=signature.signature,
                                  operation=operation)
            self.validate_operation(operation)

            with db_session():
                is_added = self.db.add_tag_operation(operation,
                                                     signature.signature)
                if is_added:
                    self.logger.info(
                        f'+ tag added ({operation.tag}, {operation.infohash.hex()})'
                    )

        except PeerValidationError as e:  # peer has exhausted its response count
            self.logger.warning(e)
        except ValueError as e:  # validation error
            self.logger.warning(e)
        except InvalidSignature as e:  # signature verification error
            self.logger.warning(e)
Example #13
def test_facebook_loginout(app):
    db.bind('sqlite', 'muorigin.sqlite', create_db=True)
    db.generate_mapping(create_tables=True)
    app.wsgi_app = orm.db_session(app.wsgi_app)

    with app.test_client() as c:
        r = do_login(c)
        assert '200 OK' == r.status
        data = r.data.decode('utf8')
        assert 'successfully logged in' in data

        r = do_login(c)
        assert '200 OK' == r.status
        data = r.data.decode('utf8')
        assert 'already logged in' in data

        r = do_logout(c)
        assert '200 OK' == r.status
        data = r.data.decode('utf8')
        assert 'successfully logged out' in data

        r = do_logout(c)
        assert '200 OK' == r.status
        data = r.data.decode('utf8')
        assert 'not logged in' in data
Example #14
async def root(request, user, watchlist):
    watchlist_movies = []
    for item in sorted(watchlist.movie_items,
                       key=lambda x: x.date,
                       reverse=True):
        movie = WatchlistMovie(imdb_id=item.imdb_id, in_watchlist=True)
        watchlist_movies.append(movie)

    await Movie.batch_populate_details(watchlist_movies)

    history = []
    # For some reason watch_history does not preload properly
    with db_session():
        user = User.get(id=user.id)
        for item in sorted(user.watch_history,
                           key=lambda x: x.date,
                           reverse=True):
            movie = HistoryMovie(imdb_id=item.imdb_id,
                                 id=item.id,
                                 date=item.date,
                                 rating=item.rating)
            history.append(movie)

    await Movie.batch_populate_details(history)

    template = request.app.env.get_template("watchlist.html")
    rendered = template.render(user=user,
                               watchlist=watchlist_movies,
                               history=history)
    return response.html(rendered)
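The comment in the example above notes that watch_history does not preload properly, which is why the handler re-fetches the user inside a fresh db_session. A minimal alternative sketch: force the collection to load explicitly with Pony's Set.load(), the same pattern a later example uses for movie_items. The entity and attribute names are taken from the example above and are not verified against the project.

with db_session():
    user = User.get(id=user.id)
    user.watch_history.load()  # load the collection while the session is still open
    history_items = sorted(user.watch_history, key=lambda x: x.date, reverse=True)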
Example #15
    def connect(self, *args, **kwargs):
        create_tables = kwargs.pop('create_tables', True)
        self._db.bind(*args, **kwargs)
        self._db.generate_mapping(create_tables=create_tables)

        with orm.db_session():
            self._db.Location.get_root()  # ensure that the root exists.
Example #16
def send_verification_email(request, token):
    global url_safe_serializer
    try:
        user_id = url_safe_serializer.loads(token, max_age=10)
    except Exception as e:
        response_html = GenericResponse.token_invalid(request.app)
        return html(response_html)

    verification_token = url_safe_serializer.dumps(user_id)
    with db_session():
        user = User.get(id=user_id)
    if not user:
        return html("<p>Something went wrong...</p><a href='/'>")

    verification_url = "{host}{location}".format(host=config.SERVER_ENDPOINT,
                                                 location=request.app.url_for(
                                                     "auth.verify_email",
                                                     token=verification_token))

    subject = "MovieCheck - Verify your email"
    template = request.app.env.get_template("emails/confirm_email.html")
    rendered = template.render(cta_url=verification_url)
    email_html = transform(rendered)
    text = "Verify your email: {}".format(verification_url)

    success = request.app.debug or send_email(user.email, subject, email_html,
                                              text)
    if not success:
        user.delete()
        return redirect(
            request.app.url_for("register_page") + "?failure=invalid_email")

    return html(
        GenericResponse.verification_email_success(
            request.app, debug_link=verification_url))
Example #17
def main():
  args = parser.parse_args()

  import conf from '../conf'
  if args.production:
    conf.debug = False
  elif args.development:
    conf.debug = True

  import models, {db} from './models'
  import {app, sio, init as init_app} from './app'

  if args.drop_all:
    print('Aye, sir! Dropping all our data out tha window!')
    db.drop_all_tables(with_all_data=True)
    return
  if args.update_song_metadata:
    with db_session():
      songs = models.Song.select()
      num_songs = len(songs)
      for index, song in enumerate(songs):
        print('\r{}/{}'.format(index, num_songs), end='')
        if isinstance(song, models.YtSong):
          models.YtSong.from_video_id(song.video_id)
        else:
          ... # TODO
      print()
      return

  init_app()
  sio.run(app, host=conf.host, port=conf.port, debug=conf.debug)
Example #18
def create_application(config=None):
    global app

    # Flask!
    app = Flask(__name__)
    app.config.from_object('supysonic.config.DefaultConfig')

    if not config:  # pragma: nocover
        config = IniConfig.from_common_locations()
    app.config.from_object(config)

    # Set loglevel
    logfile = app.config['WEBAPP']['log_file']
    if logfile:  # pragma: nocover
        from logging.handlers import TimedRotatingFileHandler
        handler = TimedRotatingFileHandler(logfile, when='midnight')
        handler.setFormatter(
            logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
        logger.addHandler(handler)
    loglevel = app.config['WEBAPP']['log_level']
    if loglevel:
        logger.setLevel(getattr(logging, loglevel.upper(), logging.NOTSET))

    # Initialize database
    init_database(app.config['BASE']['database_uri'])
    app.wsgi_app = db_session(app.wsgi_app)

    # Insert unknown mimetypes
    for k, v in app.config['MIMETYPES'].items():
        extension = '.' + k.lower()
        if extension not in mimetypes.types_map:
            mimetypes.add_type(v, extension, False)

    # Initialize Cache objects
    # Max size is MB in the config file but Cache expects bytes
    cache_dir = app.config['WEBAPP']['cache_dir']
    max_size_cache = app.config['WEBAPP']['cache_size'] * 1024**2
    max_size_transcodes = app.config['WEBAPP']['transcode_cache_size'] * 1024**2
    app.cache = Cache(path.join(cache_dir, "cache"), max_size_cache)
    app.transcode_cache = Cache(path.join(cache_dir, "transcodes"),
                                max_size_transcodes)

    # Test for the cache directory
    cache_path = app.config['WEBAPP']['cache_dir']
    if not path.exists(cache_path):
        makedirs(cache_path)  # pragma: nocover

    # Read or create secret key
    app.secret_key = get_secret_key('cookies_secret')

    # Import app sections
    if app.config['WEBAPP']['mount_webui']:
        from .frontend import frontend
        app.register_blueprint(frontend)
    if app.config['WEBAPP']['mount_api']:
        from .api import api
        app.register_blueprint(api, url_prefix='/rest')

    return app
Example #19
def create_application(config=None):
    global app

    # Flask!
    app = Flask(__name__, static_url_path='/music/static')
    app.config.from_object('supysonic.config.DefaultConfig')
    #    static_folder

    if not config:  # pragma: nocover
        config = IniConfig.from_common_locations()
    app.config.from_object(config)

    # Set loglevel
    logfile = app.config['WEBAPP']['log_file']
    if logfile:  # pragma: nocover
        from logging.handlers import TimedRotatingFileHandler
        handler = TimedRotatingFileHandler(logfile, when='midnight')
        handler.setFormatter(
            logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
        logger.addHandler(handler)
    loglevel = app.config['WEBAPP']['log_level']
    if loglevel:
        logger.setLevel(getattr(logging, loglevel.upper(), logging.NOTSET))

    # Initialize database
    init_database(app.config['BASE']['database_uri'])
    app.wsgi_app = db_session(app.wsgi_app)

    # Insert unknown mimetypes
    for k, v in app.config['MIMETYPES'].items():
        extension = '.' + k.lower()
        if extension not in mimetypes.types_map:
            mimetypes.add_type(v, extension, False)

    # Test for the cache directory
    cache_path = app.config['WEBAPP']['cache_dir']
    if not path.exists(cache_path):
        makedirs(cache_path)  # pragma: nocover

    # Read or create secret key
    secret_path = path.join(cache_path, 'secret')
    if path.exists(secret_path):
        with io.open(secret_path, 'rb') as f:
            app.secret_key = f.read()
    else:
        secret = urandom(128)
        with io.open(secret_path, 'wb') as f:
            f.write(secret)
        app.secret_key = secret

    # Import app sections
    if app.config['WEBAPP']['mount_webui']:
        from .frontend import frontend
        app.register_blueprint(frontend, url_prefix='/music')
    if app.config['WEBAPP']['mount_api']:
        from .api import api
        app.register_blueprint(api, url_prefix='/music/rest')

    return app
Example #20
def get_experiments_trials(exp_id: int, trial_id: int, response):
    with orm.db_session():
        expr = Experiment[exp_id]
        trial = Trial[trial_id]
        if trial in expr.trials:
            return trial.summary()
        else:
            raise falcon.HTTPNotFound()
Example #21
async def verify_email(request, token):
    global url_safe_serializer
    try:
        user_id = url_safe_serializer.loads(token, max_age=60 * 60 * 24)
    except SignatureExpired as e:
        user_id = url_safe_serializer.loads(token)
        with db_session():
            User.get(id=user_id).delete()
        return html(GenericResponse.token_expired(request.app))
    except Exception as e:
        return html(GenericResponse.token_invalid(request.app))

    with db_session():
        User[user_id].set(is_verified=True)

    return html(GenericResponse.email_verify_success(request.app))
Example #22
def crear_admin_user(name, email, password):
    with db_session():
        now = datetime.now()
        admin = Profesor(nombre=name, email=email,
                         username="******", role="admin",
                         fecha_creacion=now, fecha_modificacion=now,
                         password=bcrypt.hash(password))
        commit()
Example #23
def test_permissions(clij, clik, ponydb):

    # test bad day
    with orm.db_session():
        a = ponydb.Acte[16]
        a.created = a.created - timedelta(days=1)
    r = clik.delete(app.reverse_url("ordonnances:delete", acte_id=16))
    assert r.json() == "Un acte ne peut être modifié en dehors du jours même"
Example #24
def put_experiments(exp_id: int, body, response):
    with orm.db_session():
        expr = Experiment[exp_id]
        if 'name' in body:
            expr.name = body['name']
        if 'owner' in body:
            expr.owner = User[body['owner']]
        return expr.summary()
Example #25
def patient_all():
    if request.method == "GET":
        records = Patient.select()
        return template("index.html", items=records)

    elif request.method == "POST":
        json = request.POST

        # check to see if request has provided all necessary values to create object
        #

        name = json["pname"]
        surname = json["surname"]
        dob = json["dob"]
        gp = GP[app.config["user_id"]]
        status = True
        city = json["city"]
        district = json["district"]

        address = Address(cityName=city,
                          districtName=district,
                          districtCode="fix this")

        patient = Patient(name=name,
                          sex="fix this",
                          dob=dob,
                          address=address,
                          gp=gp,
                          status=status)
        # save progress
        db.commit()

        # redirect to the newly created patient's page
        redirect("/patient/{}".format(patient.id))

    elif request.method == "DELETE":

        patient_id = request.POST.id.strip()
        try:
            patient = Patient[patient_id]
        except Exception:
            return "Patient does not exist"
        patient.delete()

    elif request.method == "PUT":
        json = request.POST
        update = json["update"]
        value = json["value"]
        id = json["id"]
        with db_session():
            try:
                patient = Patient[id]
            except Exception:
                return {"message": 'patient not found'}
            setattr(patient, update, value)
            return {"message": "patient updated"}
    return {"message": "missing token"}
Example #26
def init():
  # Queue all current songs in all rooms.
  with db_session():
    for room in models.Room.select():
      room.update_song()
      room.add_to_schedule()

  # TODO: Don't start scheduler in the Werkzeug reloader process.
  models.room_update_schedule.start(daemon=True)
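The TODO above asks not to start the scheduler in the Werkzeug reloader process. A common approach is to check the WERKZEUG_RUN_MAIN environment variable, which Werkzeug sets to "true" only in the reloaded child process that actually serves requests. A sketch under that assumption; the debug flag stands in for however this app detects that the reloader is enabled (a related example uses conf.debug, but the real flag may live elsewhere).

import os

def should_start_scheduler(debug):
    # With the reloader active (debug mode), only the child process has
    # WERKZEUG_RUN_MAIN set; the parent process just watches for file changes.
    return not debug or os.environ.get("WERKZEUG_RUN_MAIN") == "true"

# Possible guard in init():
#     if should_start_scheduler(conf.debug):
#         models.room_update_schedule.start(daemon=True)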
Example #27
    def __init__(cls, name, bases, clsdict):
        assert len(bases) < 2  # no multiple inheritance for commands
        if bases:
            bases[0].subcommands[cls.name] = cls
        super().__init__(name, bases, clsdict)

        cls.subcommands = OrderedDict()
        if cls.db_required:
            cls.eval = db_session(cls.eval)
Example #28
    def get_last_status(self):
        with db_session():
            records = list(self.records.order_by(lambda x: x.id))
            if records:
                record = records[-1]
                print('***', record.created_at)
                return record.status
            else:
                return 'not_available'
Example #29
def create_application(config = None):
    global app

    # Flask!
    app = Flask(__name__)
    app.config.from_object('supysonic.config.DefaultConfig')

    if not config: # pragma: nocover
        config = IniConfig.from_common_locations()
    app.config.from_object(config)

    # Set loglevel
    logfile = app.config['WEBAPP']['log_file']
    if logfile: # pragma: nocover
        from logging.handlers import TimedRotatingFileHandler
        handler = TimedRotatingFileHandler(logfile, when = 'midnight')
        handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
        logger.addHandler(handler)
    loglevel = app.config['WEBAPP']['log_level']
    if loglevel:
        logger.setLevel(getattr(logging, loglevel.upper(), logging.NOTSET))

    # Initialize database
    init_database(app.config['BASE']['database_uri'])
    app.wsgi_app = db_session(app.wsgi_app)

    # Insert unknown mimetypes
    for k, v in app.config['MIMETYPES'].items():
        extension = '.' + k.lower()
        if extension not in mimetypes.types_map:
            mimetypes.add_type(v, extension, False)

    # Initialize Cache objects
    # Max size is MB in the config file but Cache expects bytes
    cache_dir = app.config['WEBAPP']['cache_dir']
    max_size_cache = app.config['WEBAPP']['cache_size'] * 1024**2
    max_size_transcodes = app.config['WEBAPP']['transcode_cache_size'] * 1024**2
    app.cache = Cache(path.join(cache_dir, "cache"), max_size_cache)
    app.transcode_cache = Cache(path.join(cache_dir, "transcodes"), max_size_transcodes)

    # Test for the cache directory
    cache_path = app.config['WEBAPP']['cache_dir']
    if not path.exists(cache_path):
        makedirs(cache_path) # pragma: nocover

    # Read or create secret key
    app.secret_key = get_secret_key('cookies_secret')

    # Import app sections
    if app.config['WEBAPP']['mount_webui']:
        from .frontend import frontend
        app.register_blueprint(frontend)
    if app.config['WEBAPP']['mount_api']:
        from .api import api
        app.register_blueprint(api, url_prefix = '/rest')

    return app
Example #30
async def add(request, user, watchlist, movie_id):
    logger.debug(movie_id)
    if not is_in_default_watchlist(movie_id, user):
        with db_session():
            watchlist = Watchlist.get(id=watchlist.id)
            item = MovieItem(imdb_id=movie_id,
                             date=datetime.now(),
                             watchlist=watchlist)
    return response.empty()
Example #31
    def decorated_function(request, user, *args, **kwargs):
        with db_session(strict=False):
            watchlist = Watchlist.select(
                lambda w: w.user.id == user.id).first()
            if not watchlist:
                user = User.get(id=user.id)
                watchlist = Watchlist(user=user, is_default=True)
            watchlist.movie_items.load()
        return f(request, user, watchlist, *args, **kwargs)
Example #32
def configure_errorpages(app):
    from flask import render_template
    from pony.orm import db_session

    def _error_common(template, template_modal, code, e):
        # g.is_ajax is not always present here, so we do it this way
        is_ajax = request.headers.get('X-AJAX') == '1' or request.args.get('isajax') == '1'
        if is_ajax:
            html = render_template(template_modal, error=e, error_code=code)
            response = jsonify({'page_content': {'modal': html}})
            response.status_code = code
            # for github-fetch polyfill:
            response.headers['X-Request-URL'] = iri_to_uri(request.url)
            return response

        html = render_template(template, error=e, error_code=code)
        return html, code

    def _page403(e):
        return _error_common('403.html', '403_modal.html', 403, e)

    def _page404(e):
        return _error_common('404.html', '404_modal.html', 404, e)

    def _page500(e):
        return _error_common('500.html', '500_modal.html', 500, e)

    def _pagecsrf(e):
        return _error_common('csrf.html', 'csrf_modal.html', 400, e)

    def _pageall(e):
        if e.code and e.code < 400:
            return e
        return _error_common('error.html', 'error_modal.html', e.code or 500, e)

    app.errorhandler(403)(db_session(_page403))
    app.errorhandler(404)(db_session(_page404))
    app.errorhandler(500)(db_session(_page500))
    app.errorhandler(CSRFError)(db_session(_pagecsrf))
    app.errorhandler(HTTPException)(db_session(_pageall))
Example #33
def similarity_search_molecules_core(**kwargs):
    molecules = SDFread(kwargs['input'])
    outputdata = SDFwrite(kwargs['output'])
    num = kwargs['number']
    rebuild = kwargs['rebuild']
    with db_session():
        x = TreeIndex(Molecules, reindex=rebuild)
        for molecule_container in molecules:
            a, b = TreeIndex.get_similar(x, molecule_container, num)
            print(a)
            print(b)
            for i in b:
                mol_cont = json_graph.node_link_graph(i.data)
                mol_cont.__class__ = MoleculeContainer
                outputdata.write(mol_cont)
Example #34
def similarity_search_reactions_core(**kwargs):
    outputdata = RDFwrite(kwargs['output'])
    reactions = RDFread(kwargs['input'])
    num = kwargs['number']
    rebuild = kwargs['rebuild']
    with db_session():
        x = TreeIndex(Reactions, reindex=rebuild)
        for reaction_container in reactions:
            print(reaction_container)
            a, b = TreeIndex.get_similar(x, reaction_container, num)
            print(a)
            print(b)
            for i in b:
                react_cont = i.structure
                react_cont.__class__ = ReactionContainer
                outputdata.write(react_cont)
Example #35
    def put(self, uuid):
        '''
        If a device application re-registers without deregistering
        (`DELETE`) first, we assume it has already deregistered but that the
        deregistration request was dropped or mis-ordered due to a race
        condition between HTTP requests. We do the same cleanup as the
        deregistration process, then generate a new ``revision`` token.
        '''
        log.debug('Receive UUID: %r', uuid)
        response = {
            'id': uuid,
            'state': 'ok',
        }

        try:
            self.verify_put_data(self.json)
        except AssertionError as e:
            self.send_error(403, msg=e.args[0])
            return

        try:
            with orm.db_session(serializable=True):
                resource = config.db.Resource.get_for_update(id=uuid)
                if resource is None:
                    resource = config.db.Resource(id=uuid, **self.json)
                else:
                    # case of device application re-registered
                    log.info('Device application re-registered: %r', uuid)
                    resource.revision = uuid4()
        except ValueError as e:
            self.send_error(403, msg=e.args[0])
            return
        else:
            # FIXME: do manual cleanup as in deregistration here
            pass

        rev = response['rev'] = str(resource.revision)

        # TODO: check available server
        # but now we simply pick up first ``accept_protos``
        if resource.accept_protos[0].lower() == 'mqtt':
            response['url'] = self.mqtt_url
            response['ctrl_chans'] = self.mqtt_ctrl_chans(resource.id)
            sub_topic, pub_topic = response['ctrl_chans']
            iot_conn_mgr.mqtt_ctrl(resource.id, pub_topic, sub_topic, rev)

        self.write(response)
Example #36
def fill_database_core(**kwargs):
    it = iter(RDFread(kwargs['input']))
    chunksize = kwargs['chunksize']

    for x in itertools.zip_longest(*[it]*chunksize):
        if None in x:
            y = []
            for i in x:
                if i is None:
                    break
                y.append(i)
            x = y

        print(x)

        substrats_list = []
        products_list = []

        for i in x:
            for j in i['substrats']:
                substrats_list.append(j)
            for u in i['products']:
                products_list.append(u)

        print(substrats_list)
        print(products_list)

        substrats_fp = Molecules.get_fingerprints(substrats_list)
        products_fp = Molecules.get_fingerprints(products_list)
        reactions_fps = Reactions.get_fingerprints(x)

        with db_session():
            for i in range(0, len(substrats_list)):
                substrat_fear = Molecules.get_fear(substrats_list[i])
                if not Molecules.exists(fear=substrat_fear):
                    Molecules(substrats_list[i], substrats_fp[i])

            for i in range(0, len(products_list)):
                product_fear = Molecules.get_fear(products_list[i])
                if not Molecules.exists(fear=product_fear):
                    Molecules(products_list[i], products_fp[i])

            for i in range(0, len(x)):
                react_fear = Reactions.get_fear(x[i])
                if not Reactions.exists(fear=react_fear):
                    Reactions(reaction=x[i], fingerprint=reactions_fps[i])
Example #37
def structure_molecule_search_core(**kwargs):
    molecules = SDFread(kwargs['input'])
    outputdata = RDFwrite(kwargs['output'])
    product = kwargs['product']
    if not kwargs['product'] and not kwargs['reagent']:
        product = None
    elif kwargs['product'] and kwargs['reagent']:
        print('No,No,No')
    elif kwargs['product']:
        product = True
    elif kwargs['reagent']:
        product = False
    with db_session():
        for molecule in molecules:
            required_reacts = Reactions.get_reactions_by_molecule(molecule, product)
            print(required_reacts)
            for reaction in required_reacts:
                react_cont = reaction.structure
                print(react_cont)
                outputdata.write(react_cont)
Example #38
def create_application(config = None):
    global app

    # Flask!
    app = Flask(__name__)
    app.config.from_object('supysonic.config.DefaultConfig')

    if not config: # pragma: nocover
        config = IniConfig.from_common_locations()
    app.config.from_object(config)

    # Set loglevel
    logfile = app.config['WEBAPP']['log_file']
    if logfile: # pragma: nocover
        import logging
        from logging.handlers import TimedRotatingFileHandler
        handler = TimedRotatingFileHandler(logfile, when = 'midnight')
        loglevel = app.config['WEBAPP']['log_level']
        if loglevel:
            mapping = {
                'DEBUG':   logging.DEBUG,
                'INFO':    logging.INFO,
                'WARNING': logging.WARNING,
                'ERROR':   logging.ERROR,
                'CRITICAL': logging.CRITICAL
            }
            handler.setLevel(mapping.get(loglevel.upper(), logging.NOTSET))
        app.logger.addHandler(handler)

    # Initialize database
    init_database(app.config['BASE']['database_uri'])
    app.wsgi_app = db_session(app.wsgi_app)

    # Insert unknown mimetypes
    for k, v in app.config['MIMETYPES'].items():
        extension = '.' + k.lower()
        if extension not in mimetypes.types_map:
            mimetypes.add_type(v, extension, False)

    # Test for the cache directory
    cache_path = app.config['WEBAPP']['cache_dir']
    if not path.exists(cache_path):
        makedirs(cache_path) # pragma: nocover

    # Read or create secret key
    secret_path = path.join(cache_path, 'secret')
    if path.exists(secret_path):
        with io.open(secret_path, 'rb') as f:
            app.secret_key = f.read()
    else:
        secret = urandom(128)
        with io.open(secret_path, 'wb') as f:
            f.write(secret)
        app.secret_key = secret

    # Import app sections
    if app.config['WEBAPP']['mount_webui']:
        from .frontend import frontend
        app.register_blueprint(frontend)
    if app.config['WEBAPP']['mount_api']:
        from .api import api
        app.register_blueprint(api, url_prefix = '/rest')

    return app
Example #39
    route_base = '/recentcheckouts/'

    def index(self):
        '''Return items checked out in the past hour.'''
        hour_ago = datetime.utcnow() - timedelta(hours=1)
        recent = orm.select(
            item for item in Item
            if item.checked_out and item.updated > hour_ago
        ).order_by(Item.updated.desc())[:]
        return jsonify({"items": ItemSerializer(recent, many=True).data})

@app.route("/")
def home():
    return render_template('index.html', orm="Pony ORM")

# Generate object-database mapping
db.generate_mapping(check_tables=False)

# Register views
api_prefix = "/api/v1/"
ItemsView.register(app, route_prefix=api_prefix)
PeopleView.register(app, route_prefix=api_prefix)
RecentCheckoutsView.register(app, route_prefix=api_prefix)


if __name__ == '__main__':
    db.create_tables()
    # Make sure each thread gets a db session
    app.wsgi_app = orm.db_session(app.wsgi_app)
    app.run(port=5000)
Example #40
    async def on_chat_message(self, message):
        if not is_text_message(message):
            return

        author_id = str(message['from']['id'])
        username = message['from']['username']

        message_id = message['message_id']
        chat_id = str(message['chat']['id'])
        timestamp = message['date']
        text = message['text']

        with self.pickle_file.open('rb') as pickle_file:
            try:
                last_pickled_messages = pickle.load(pickle_file)
            except EOFError:
                last_pickled_messages = {}
            try:
                last_message_ids = pickle.load(pickle_file)
            except EOFError:
                last_message_ids = {}

        with orm.db_session():
            author = Author.get(author_id=author_id)

            if not author:
                author = Author(author_id=author_id, username=username)
            else:
                author.username = username

            chat_message = ChatMessage(
                author=author,
                message_id=message_id,
                chat_id=chat_id,
                timestamp=timestamp,
                text=text,
            )

            db.commit()

            if any(t in text for t in self.settings['trigger_chat_commands']):
                last_message_id = last_message_ids.get(chat_id, 0)
                self.log.debug("Last message ID for chat ID {} is {}".format(
                    chat_id, last_message_id
                ))

                query = orm.select(
                    m.text
                    for m in ChatMessage
                    if m.chat_id == chat_id and
                    m.message_id > last_message_id)

                messages = [self.process_text(m) for m in query]

                try:
                    mc = MarkovChain.from_string(' '.join(messages))

                    # Merging unpickled data with a freshly created dict.
                    msgs = last_pickled_messages.get(chat_id, {})
                    self.log.debug("Unpickled {} messages for chat ID {}".format(
                        len(msgs), chat_id
                    ))
                    for k, v in last_pickled_messages.get(chat_id, {}).items():
                        values = v[:]
                        if k in mc.db:
                            values.extend(mc.db[k])
                        mc.db.update({k: values})

                    query = orm.select(
                        m.message_id
                        for m in ChatMessage
                        if m.chat_id == chat_id
                    ).order_by(ChatMessage.message_id.desc()).limit(1)

                    new_last_message_id = query.get()
                    if not new_last_message_id:
                        new_last_message_id = 0
                    self.log.debug("Setting new last message ID for chat ID {} to {}".format(
                        chat_id, new_last_message_id
                    ))

                    with self.pickle_file.open('wb') as pickle_file:
                        pickle.dump({chat_id: mc.db}, pickle_file, 4)
                        pickle.dump({chat_id: new_last_message_id}, pickle_file, 4)

                    return ' '.join(mc.generate_sentences())

                except Exception:
                    return