Example #1
    def thumbs_view(filename):
        try:
            img_file = None
            s_filename = secure_filename(filename)
            img_file = os.path.join(os.path.abspath(constants.dir_thumbs),
                                    s_filename)
            img_file = io_cmd.CoreFS(img_file)
            if not img_file.exists:
                img_file = None

            if img_file and img_file.path.endswith(constants.link_ext):
                img_file = io_cmd.CoreFS(utils.get_real_file(img_file.path))
                if not img_file.exists:
                    img_file = None

            if img_file:
                mimetype, _ = mimetypes.guess_type(img_file.path)
                if mimetype:
                    with img_file.open("rb") as fp:
                        return send_file(io.BytesIO(fp.read()),
                                         conditional=True,
                                         mimetype=mimetype)
        except Exception:
            log.exception("Exception was raised during thumbnail retrieval")
        abort(404)
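For orientation: the only CoreFS members exercised above are exists, path and open(). A stripped-down sketch of the same lookup, with the Flask response handling left out (the helper name is hypothetical):

def resolve_thumb(filename):
    # Mirror of the lookup above; returns a CoreFS pointing at a real file, or None.
    target = io_cmd.CoreFS(os.path.join(os.path.abspath(constants.dir_thumbs),
                                        secure_filename(filename)))
    if target.exists and target.path.endswith(constants.link_ext):
        # link stubs are resolved to the real file they point at
        target = io_cmd.CoreFS(utils.get_real_file(target.path))
    return target if target.exists else None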
Example #2
    def data(self, load_values=False, load_collections=False):
        self._before_data()
        d = {}
        path = io_cmd.CoreFS(self.item.path)
        d['id'] = self.item.id
        if path.ext == constants.link_ext:
            d['ext'] = io_cmd.CoreFS(self.item.path[:-len(path.ext)]).ext
        else:
            d['ext'] = path.ext
        if self._local_url:
            _, tail = os.path.split(path.get())
            # TODO: make sure path is in static else return actual path
            if tail:
                furl = constants.thumbs_view + '/' + tail
                d['data'] = furl
            else:
                d['data'] = ""
        else:
            im = ""
            if path.exists:
                with path.open("rb") as f:
                    im = utils.imagetobase64(f.read())
            if self._uri:
                im = im.replace('\n', '')
                im = "data:image/{};base64,".format(path.ext[1:]) + im
            d['data'] = im
        d['size'] = self.item.size
        d['timestamp'] = self.item.timestamp.timestamp if self.item.timestamp else None
        return d
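The double-extension handling above is the subtle part: when the stored file carries the link suffix, the real extension has to be read from the name underneath that suffix. A self-contained sketch of just that rule, using plain os.path and a hypothetical '.link' value standing in for constants.link_ext:

import os

LINK_EXT = ".link"  # hypothetical stand-in for constants.link_ext

def real_ext(path):
    # "cover.png" -> ".png"; "cover.png.link" -> ".png" (extension under the link stub)
    ext = os.path.splitext(path)[1]
    if ext == LINK_EXT:
        ext = os.path.splitext(path[:-len(LINK_EXT)])[1]
    return ext

assert real_ext("cover.png") == ".png"
assert real_ext("cover.png.link") == ".png"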
Example #3
def scan_galleries(path: str, scan_options: dict = {}):
    """
    Scan for galleries in the given directory/archive

    Args:
        path: path to directory/archive that exists on this system
        scan_options: options to apply to the scanning process, see :ref:`Settings` for available scanning options

    Returns:
        .. code-block:: guess

            {
                'command_id': int,
                'view_id': int
            }

    |async command|

    |temp view|
    """
    path = io_cmd.CoreFS(path)
    if not path.exists:
        raise exceptions.CoreError(
            utils.this_function(),
            f"Path does not exists on this system: '{path.path}'")

    view_id = next(constants.general_counter)
    cmd_id = gallery_cmd.ScanGallery(services.AsyncService.generic).run(
        path, scan_options, view_id=view_id)

    return message.Identity('data', {'command_id': cmd_id, 'view_id': view_id})
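A hedged caller sketch; the path is purely illustrative and the payload layout follows the docstring, while the exact way message.Identity is unwrapped is not shown in these examples:

try:
    msg = scan_galleries("/library/incoming")  # hypothetical path
    # msg wraps {'command_id': ..., 'view_id': ...}: the command id identifies the
    # async ScanGallery command, the view id the temporary view results land in.
except exceptions.CoreError:
    # raised above when the path does not exist on this system
    raise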
Example #4
def cmd_commands(args):
    if args.gen_config:
        config.config.save_default()
        log.i("Generated example configuration file at {}".format(
            io_cmd.CoreFS(constants.config_example_path).path), stdout=True)
        return True

    if args.create_user:
        print("============ Create User =============")
        uinfo = create_user_interactive()
        if uinfo:
            if db.create_user(uinfo['role'], uinfo['username'], uinfo.get("password", "")):
                print("Successfully created new user", uinfo['username'])
            else:
                print("User {} already exists".format(uinfo['username']))
        print("========== Create User End ===========")
        return True

    if args.delete_user:
        if db.delete_user(args.delete_user):
            print("Successfully deleted user", args.delete_user)
        else:
            print("User {} does not exist".format(args.delete_user))
        return True

    if args.list_users:
        for u in db.list_users(limit=20, offset=args.list_users - 1):
            print("{}\t[{}]".format(u.name, u.role.value))
        return True
Example #5
    def main(self, path: typing.Union[str, io_cmd.CoreFS], options: dict = {},
             view_id: int = None, auto_add: bool = False) -> typing.List[io_cmd.GalleryFS]:
        fs = io_cmd.CoreFS(path)
        galleries = []
        self.set_progress(title=fs.path, text=fs.path, type_=enums.ProgressType.GalleryScan)
        self.set_max_progress(1)
        if fs.is_dir or fs.is_archive:
            scan_options = _get_scan_options()
            scan_options.update(options)
            found_paths = set()
            if fs.exists:
                with self._discover.call(fs.path, scan_options) as plg:
                    for p in plg.all(default=True):
                        for x in p:
                            found_paths.add(os.path.normpath(x))

            paths_len = len(found_paths)
            log.d("Found", paths_len, "gallery candidates")

            self.set_max_progress(paths_len, add=True)
            view_id = view_id if view_id else constants.default_temp_view_id
            galleries = self._generate_gallery_fs(found_paths, scan_options).get()
            for g in galleries:
                g.add(view_id=view_id)
            if auto_add:
                raise NotImplementedError
                #add_cmd = AddGallery()

        self.next_progress(text="")

        return galleries
Example #6
def page_generate(rar_p, in_queue, out_pipe):
    rarfile.UNRAR_TOOL = rar_p
    item_id, items = in_queue.get()
    stuff_to_send = []
    items_len = len(items)
    name = random.randint(1, 100)
    for n_item, item in enumerate(items, 1):
        gallery, ch_inarchive, ch_path, path, g_path = item
        pages = []
        page_hash = None
        try:
            if ch_inarchive:
                page_hash = (g_path, ch_path)
                afs = io_cmd.CoreFS(path)
                try:
                    if ch_path:
                        afs._init_archive()
                        afs = io_cmd.CoreFS(afs._archive.path_separator.join((path, ch_path)), afs._archive)
                        
                    n = 1
                    for c in sorted(afs.contents()):
                        if c.is_image:
                            pages.append((c.name, c.path, n, True))
                            n += 1
                finally:
                    afs.close()
            else:
                page_hash = (ch_path,)
                dir_images = [x.path for x in os.scandir(g_path) if not x.is_dir() and x.name.endswith(io_cmd.CoreFS.image_formats())]
                for n, x in enumerate(sorted(dir_images), 1):
                    x = io_cmd.CoreFS(x)
                    pages.append((x.name, x.path, n, False))
        except NotImplementedError:
            pass
        except rarfile.RarCannotExec:
            print("RAR file not supported, skipping: {}".format(g_path))
        except Exception:
            print("An unknown error occurred, skipping: {}".format(g_path))

        stuff_to_send.append((page_hash, gallery, pages))
        if len(stuff_to_send) > 5 or n_item == items_len:
            out_pipe.send(stuff_to_send.copy())
            stuff_to_send.clear()
    out_pipe.send(item_id)
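The heart of the archive branch above is enumerating image entries through CoreFS. A reduced sketch of just that part, assuming (as the code above does) that contents() yields entries exposing is_image, name and path:

def list_archive_images(archive_path):
    afs = io_cmd.CoreFS(archive_path)
    pages = []
    try:
        n = 1
        for entry in sorted(afs.contents()):
            if entry.is_image:
                pages.append((entry.name, entry.path, n, True))
                n += 1
    finally:
        afs.close()  # always release the archive handle
    return pages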
Example #7
def start(argv=None, db_kwargs={}):
    if argv is None:
        argv = sys.argv[1:]
    utils.setup_dirs()
    args = parser.parse_args(argv)
    utils.parse_options(args)

    if not args.only_web:
        db.init(**db_kwargs)
        command.init_commands()
        hlogger.Logger.init_listener(args)
        monkey.patch_all(thread=False)

    hlogger.Logger.setup_logger(args)

    if args.generate_config:
        constants.config.save()
        log.i("Generated configuration file at '{}'".format(
            io_cmd.CoreFS(constants.settings_file).path),
              stdout=True)
        return

    log.i("HPX SERVER START")

    if not args.only_web:
        constants.available_commands = command.get_available_commands()

        services.init_generic_services()

        if not args.safe:
            plugins.plugin_loader(constants.dir_plugin)
        else:
            plugins.registered.init_plugins()

    log.i("Starting webserver... ({}:{})".format(constants.host_web,
                                                 constants.port_web),
          stdout=True)
    web_args = (constants.host_web, constants.port_web,
                constants.dev if args.only_web else False)
    if args.only_web:
        server.WebServer().run(*web_args)
    else:
        Process(target=server.WebServer().run,
                args=web_args,
                kwargs={
                    'logging_queue': hlogger.Logger._queue,
                    'logging_args': args
                },
                daemon=True).start()
        server.HPServer().run(interactive=args.interact)

    if not args.only_web:
        constants.config.save()
        hlogger.Logger.shutdown_listener()
    log.i("HPX SERVER END")
Example #8
    def save(self, filepath, decode_unicode=False, extension=False):
        """
        Save the content to a file. Also accepts CoreFS.

        Args:
            extension: append file extension from url to filepath

        Returns str path to file
        """
        assert isinstance(filepath, (str, io_cmd.CoreFS))
        log.i("Saving content to file", self._rsp.url)
        if isinstance(filepath, str):
            filepath = io_cmd.CoreFS(filepath)

        if extension:
            filepath = io_cmd.CoreFS(
                filepath.path + io_cmd.CoreFS(os.path.split(self._url)[1]).ext,
                filepath._archive)

        self.set_max_progress(
            int(self._rsp.headers.get('Content-Length', '0').strip()) + 1)
        self.set_progress(type_=enums.ProgressType.Request)
        log.d("Saving to filepath", filepath)
        with filepath.open(mode="wb") as f:
            if self.properties.stream:
                s_time = arrow.now()
                dl_length = 0
                for data in self._rsp.iter_content(
                        chunk_size=1024, decode_unicode=decode_unicode):
                    data_len = len(data)
                    dl_length += data_len
                    self.next_progress(
                        data_len,
                        text="[{0:.3f} mbps] - {1}".format(
                            (dl_length / 1000000) / max(
                                (arrow.now() - s_time).seconds, 1), self._url))
                    f.write(data)
                    f.flush()
            else:
                raise NotImplementedError
        self.next_progress()
        return filepath.path
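A hedged usage sketch; 'req' stands in for whatever command object exposes save(), and the destination path is purely illustrative:

saved = req.save(os.path.join(constants.dir_temp, "download"), extension=True)
# extension=True appends the extension taken from the request url, so a url ending
# in ".zip" ends up at ".../download.zip"; save() returns the final path as a str.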
Example #9
    def main(self, model: db.Base, item_id: int,
             image_size: enums.ImageSize) -> db.Profile:
        self.model = model

        if image_size == enums.ImageSize.Original:
            image_size = utils.ImageSize(0, 0)
        else:
            image_size = utils.ImageSize(
                *constants.image_sizes[image_size.name.lower()])

        with self.models.call() as plg:
            for p in plg.all(default=True):
                self._supported_models.update(p)

        if self.model not in self._supported_models:
            raise exceptions.CommandError(
                utils.this_command(self),
                "Model '{}' is not supported".format(model))

        img_hash = io_cmd.ImageItem.gen_hash(model, image_size, item_id)

        generate = True
        sess = constants.db_session()

        profile_size = str(tuple(image_size))

        self.cover = sess.query(db.Profile).filter(
            db.and_op(db.Profile.data == img_hash,
                      db.Profile.size == profile_size)).first()

        old_img_hash = None
        if self.cover:
            if io_cmd.CoreFS(self.cover.path).exists:
                generate = False
            else:
                old_img_hash = self.cover.data

        self.next_progress()
        if not generate:
            model_name = db.model_name(model)
            with self.invalidate.call_capture(model_name, model_name, item_id,
                                              image_size) as plg:
                if plg.first_or_default():
                    generate = True

        self.next_progress()
        if generate:
            constants.task_command.thumbnail_cleaner.wake_up()
            self.cover = self.run_native(self._generate_and_add, img_hash,
                                         old_img_hash, generate, model,
                                         item_id, image_size,
                                         profile_size).get()
        self.cover_event.emit(self.cover)
        return self.cover
Example #10
    def main(self, model: db.Base, item_id: int,
             image_size: enums.ImageSize) -> db.Profile:

        self.model = model

        if image_size == enums.ImageSize.Original:
            image_size = utils.ImageSize(0, 0)
        else:
            image_size = utils.ImageSize(
                *constants.image_sizes[image_size.name.lower()])

        with self.models.call() as plg:
            for p in plg.all(default=True):
                self._supported_models.update(p)

        if self.model not in self._supported_models:
            raise exceptions.CommandError(
                utils.this_command(self),
                "Model '{}' is not supported".format(model))

        img_hash = io_cmd.ImageItem.gen_hash(model, image_size, item_id)

        generate = True
        sess = constants.db_session()
        self.cover = sess.query(
            db.Profile).filter(db.Profile.data == img_hash).one_or_none()

        if self.cover:
            if io_cmd.CoreFS(self.cover.path).exists:
                generate = False
        else:
            self.cover = db.Profile()

        if generate:
            model_name = db.model_name(model)
            with self.generate.call_capture(model_name, model_name, item_id,
                                            image_size) as plg:
                self.cover.path = plg.first()

            self.cover.data = img_hash
            self.cover.size = str(tuple(image_size))

        if self.cover.path and generate:
            i = GetModelItemByID().run(model, {item_id})[0]
            i.profiles.append(self.cover)
            sess.commit()
        elif not self.cover.path:
            self.cover = None

        self.cover_event.emit(self.cover)
        return self.cover
Example #11
    def _find_galleries(path, options):
        path = io_cmd.CoreFS(path)
        archive_formats = io_cmd.CoreFS.archive_formats()
        found_galleries = []
        if path.is_archive or path.inside_archive:
            raise NotImplementedError
        else:
            contents = os.scandir(str(path))
            for p in contents:
                if p.is_file() and not p.path.endswith(archive_formats):
                    continue
                found_galleries.append(os.path.abspath(p.path))

        return tuple(found_galleries)
Example #12
    def _resolve_gallery(gallery, number):
        parent = ""
        child = ""
        if gallery.single_source:
            if gallery.pages.count():
                # TODO: what happens when page number 1 doesn't exist?
                first_page = gallery.pages.filter(db.Page.number == number).first()
                if first_page:
                    if first_page.in_archive:
                        p = io_cmd.CoreFS(first_page.path)
                        parent = p.archive_path
                        child = p.path
                    else:
                        child = first_page.path
                        parent = os.path.split(first_page.path)[0]
        else:
            raise NotImplementedError

        return parent, child
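Judging from the in-archive branch above, a CoreFS built from a page path that points inside an archive exposes both the containing archive and the full inner path. An illustrative sketch with a hypothetical location:

p = io_cmd.CoreFS("/library/MyGallery.zip/001.jpg")  # hypothetical in-archive page
# Based on the usage above:
#   p.archive_path -> the containing archive (used as 'parent')
#   p.path         -> the full path including the inner entry (used as 'child')
parent, child = p.archive_path, p.path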
Example #13
    def _update_db(self, stale_cover, item_id, model, old_hash):
        log.d("Updating profile for database item", model)
        s = constants.db_session()
        cover = s.query(db.Profile).filter(
            db.and_op(db.Profile.data == old_hash,
                      db.Profile.size == stale_cover.size)).all()

        if len(cover) > 1:
            cover, *cover_ex = cover
            for x in cover_ex:
                s.delete(x)
        elif cover:
            cover = cover[0]

        new = False

        if cover:
            # sometimes an identical image has already been generated and exists, so we shouldn't do anything
            fs = io_cmd.CoreFS(cover.path)
            if (cover.path != stale_cover.path) and fs.exists:
                fs.delete()
        else:
            cover = db.Profile()
            new = True

        cover.data = stale_cover.data
        cover.path = stale_cover.path
        cover.size = stale_cover.size

        if new or not s.query(db.Profile).join(
                db.relationship_column(model, db.Profile)).filter(
                    db.and_op(db.Profile.id == cover.id, model.id
                              == item_id)).scalar():
            log.d("Adding new profile to database item", model,
                  "()".format(item_id))
            i = s.query(model).get(item_id)
            i.profiles.append(cover)

        s.commit()
        self.next_progress()
Example #14
    def main(self, model: db.Base, item_id: int,
             image_size: enums.ImageSize) -> db.Profile:

        self.model = model

        if image_size == enums.ImageSize.Original:
            image_size = utils.ImageSize(0, 0)
        else:
            image_size = utils.ImageSize(
                *constants.image_sizes[image_size.name.lower()])

        with self.models.call() as plg:
            for p in plg.all(default=True):
                self._supported_models.update(p)

        if self.model not in self._supported_models:
            raise exceptions.CommandError(
                utils.this_command(self),
                "Model '{}' is not supported".format(model))

        img_hash = io_cmd.ImageItem.gen_hash(model, image_size, item_id)

        cover_path = ""
        generate = True
        sess = constants.db_session()
        self.cover = sess.query(
            db.Profile).filter(db.Profile.data == img_hash).one_or_none()

        if self.cover:
            if io_cmd.CoreFS(self.cover.path).exists:
                generate = False
            else:
                cover_path = self.cover.path
        if generate:
            self.cover = self.run_native(self._generate_and_add, img_hash,
                                         generate, cover_path, model, item_id,
                                         image_size).get()
        self.cover_event.emit(self.cover)
        return self.cover
Example #15
def start():
    utils.setup_dirs()
    args = parser.parse_args()
    utils.parse_options(args)
    utils.setup_logger(args)

    if args.generate_config:
        constants.config.save()
        print("Generated configuration file at '{}'".format(
            io_cmd.CoreFS(constants.settings_file).path))
        return

    log.i("HPX SERVER START")

    if not args.only_web:
        constants.available_commands = command.get_available_commands()
        constants.core_plugin = plugins._plugin_load(
            "happypanda.core.coreplugin", "core", _logger=log)

        if not args.safe:
            plugins.plugin_loader(constants.dir_plugin)
        else:
            plugins.registered.init_plugins()

    log.i("Starting webserver... ({}:{})".format(constants.host_web,
                                                 constants.port_web),
          stdout=True)
    web_args = (constants.host_web, constants.port_web,
                constants.dev if args.only_web else False)
    if args.only_web:
        server.WebServer().run(*web_args)
    else:
        Process(target=server.WebServer().run, args=web_args,
                daemon=True).start()
        server.HPServer().run(interactive=args.interact)

    if not args.only_web:
        constants.config.save()
    log.i("HPX SERVER END")
Example #16
def start(argv=None, db_kwargs={}):
    assert sys.version_info >= (3, 6), "Python 3.6 and up is required"
    e_code = None
    e_num = 0
    try:
        utils.setup_online_reporter()
        log.i("HPX START")
        log.i("Version:", constants.version_str)
        log.i("DB Version:", constants.version_db_str)
        log.i("Web Version:", constants.version_web_str)
        if argv is None:
            argv = sys.argv[1:]
        utils.setup_dirs()
        args = parser.parse_args(argv)
        utils.parse_options(args)
        # setup logger without multiprocessing
        hlogger.Logger.setup_logger(args, main=True, dev=constants.dev, debug=config.debug.value)
        utils.enable_loggers(config.enabled_loggers.value)
        db_inited = False
        if constants.dev:
            log.i("DEVELOPER MODE ENABLED", stdout=True)
        else:
            pdb.set_trace = lambda: None  # disable pdb
        if config.debug.value:
            log.i("DEBUG MODE ENABLED", stdout=True)
        log.i(utils.os_info())

        if not args.only_web:
            db_inited = db.init(**db_kwargs)
            command.setup_commands()
        else:
            db_inited = True

        if cmd_commands(args):
            return

        if not args.only_web:  # can't init earlier because of cmd_commands
            hlogger.Logger.init_listener(args=args, debug=config.debug.value, dev=constants.dev)

        # setup logger with multiprocessing
        hlogger.Logger.setup_logger(
            args,
            main=True,
            dev=constants.dev,
            debug=config.debug.value,
            logging_queue=hlogger.Logger._queue)

        # invalidate all cache
        if constants.dev or config.debug.value:
            for n, c in constants.cache_regions.items():
                c.invalidate()

        update_state = check_update() if not (not constants.is_frozen and constants.dev) else None

        if not update_state == constants.UpdateState.Installing.value and db_inited:

            utils.setup_i18n()

            if not args.only_web:
                constants.available_commands = command.get_available_commands()

                services.setup_generic_services()

                constants.plugin_manager = plugins.PluginManager()

                if not args.safe:
                    plugins.plugin_loader(constants.plugin_manager, constants.dir_plugin)
                    if config.plugin_dir.value:
                        plugins.plugin_loader(constants.plugin_manager, config.plugin_dir.value)

            constants.notification = server.ClientNotifications()

            # starting stuff
            services.Scheduler.generic.start()
            init_commands(args)

            log.i("Starting webserver... ({}:{})".format(config.host_web.value, config.port_web.value), stdout=True)
            web_args = (config.host_web.value, config.port_web.value)
            web_kwargs = {
                'dev': constants.dev,
                'debug': config.debug.value,
            }
            if args.only_web:
                server.WebServer().run(*web_args, **web_kwargs)
            else:
                web_kwargs.update({'logging_queue': hlogger.Logger._queue,
                                   'cmd_args': args, })
                constants.web_proc = Process(target=server.WebServer().run,
                                             args=web_args,
                                             kwargs=web_kwargs,
                                             daemon=True,
                                             name="gevent")
                constants.web_proc.start()
                hp_server = server.HPServer()
                meta_cmd.ShutdownApplication.shutdown.subscribe(hp_server.shutdown)
                meta_cmd.RestartApplication.restart.subscribe(hp_server.restart)
                meta_cmd.UpdateApplication.update.subscribe(hp_server.update)
                e_code = hp_server.run(interactive=args.interact)

        else:
            if db_inited:
                e_code = constants.ExitCode.Update
            else:
                e_code = constants.ExitCode.Exit

        io_cmd.CoreFS(constants.dir_temp).delete(ignore_errors=True)
        log.i("HPX END")

        if e_code == constants.ExitCode.Exit:
            log.i("Shutting down...", stdout=True)
        elif e_code == constants.ExitCode.Restart:
            log.i("Restarting...", stdout=True)
        if not args.only_web:
            config.config.save()
            services.Scheduler.shutdown_all()
            hlogger.Logger.shutdown_listener()

        hlogger.shutdown()

        # the gui will handle the restart
        if e_code == constants.ExitCode.Restart and not constants.from_gui:
            utils.restart_process()
        elif e_code == constants.ExitCode.Update and not constants.from_gui:
            utils.launch_updater()

    except Exception as e:
        if constants.web_proc:
            constants.web_proc.terminate()
        print(e)  # intentional
        e_num = 1
        if not isinstance(e, exceptions.CoreError):
            if config.report_critical_errors.value and not constants.dev and constants.is_frozen:
                rollbar.report_exc_info()
            raise
    return e_code.value if e_code else e_num
Example #17
def source_exists(item_type: enums.ItemType = enums.ItemType.Gallery,
                  item_id: int = 0,
                  check_all: bool = False):
    """
    Check if gallery/page source exists on disk

    Args:
        item_type: possible items are :py:attr:`.ItemType.Gallery`, :py:attr:`.ItemType.Page`
        item_id: id of item
        check_all: goes through all pages and checks them, default behaviour is to only check parent files/folders. Only relevant for :py:attr:`.ItemType.Gallery`

    Returns:
        .. code-block:: guess

            {
                'exists' : bool,
                'missing' : [
                    {'id': int, 'item_type': item_type},
                    ...
                    ]
            }

    """

    item_type = enums.ItemType.get(item_type)

    _, db_model = item_type._msg_and_model(
        (enums.ItemType.Gallery, enums.ItemType.Page))

    if item_type == enums.ItemType.Page:
        item = database_cmd.GetModelItems().run(db_model, {item_id},
                                                columns=(db.Page.path, ))
    elif item_type == enums.ItemType.Gallery:
        item = database_cmd.GetModelItems().run(
            db_model, {item_id}, columns=(db.Gallery.single_source, ))

    if not item:
        raise exceptions.DatabaseItemNotFoundError(
            utils.this_function(),
            "'{}' with id '{}' was not found".format(item_type.name, item_id))
    else:
        item = item[0]

    paths = {}
    not_empty = True
    if item_type == enums.ItemType.Page:
        paths[item_id] = (item[0], item_type.value)
    elif item_type == enums.ItemType.Gallery:
        s = constants.db_session()
        if item and not check_all:
            p = s.query(db.Page.path).filter(db.Gallery.id == item_id).first()
            if p:
                paths[item_id] = (os.path.split(p[0])[0], item_type.value)
            else:
                not_empty = True
        else:
            ps = s.query(
                db.Page.id,
                db.Page.path).filter(db.Page.gallery_id == item_id).all()
            for p in ps:
                paths[p[0]] = (p[1], enums.ItemType.Page.value)
            not_empty = bool(ps)

    missing = []
    for t_id in paths:
        src, t_type = paths[t_id]
        try:
            e = io_cmd.CoreFS(src).exists
        except exceptions.ArchiveExistError:
            e = False
        if not e:
            missing.append({'id': t_id, 'item_type': t_type})

    return message.Identity("exists", {
        'exists': not missing and not_empty,
        'missing': missing
    })
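A hedged caller sketch; the item id is purely illustrative and the reply shape follows the docstring above:

# Check only the gallery's parent folder/archive.
reply = source_exists(enums.ItemType.Gallery, item_id=42)

# Check every page belonging to the gallery instead.
reply = source_exists(enums.ItemType.Gallery, item_id=42, check_all=True)
# reply wraps {'exists': bool, 'missing': [{'id': ..., 'item_type': ...}, ...]}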