Code example #1
    def execute_macro(self, name):
        if name != prefs['current_macro']:
            prefs['current_macro'] = name
            self.mark_current_macro()

        macro = prefs['macros'].get(name, None)
        if not macro:
            return

        log = GUILog()
        log.outputs.append(ANSIStream())
        try:
            if macro.get('execfromfile'):
                with open(macro['macrofile'], 'r') as file:
                    self.execute(file, log)
            elif macro['program']:
                program = macro['program']
                encoding = self.get_encoding(program)
                if encoding:
                    program = program.encode(encoding)
                self.execute(program, log)
        except Exception:
            log.exception(_('Failed to execute macro'))
            error_dialog(
                self.gui,
                _('Failed to execute macro'),
                _('Failed to execute macro, click "Show details" for more information.'),
                det_msg=log.plain_text,
                show=True)
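get_encoding is not shown in this excerpt, but the pattern above (encode the program to bytes only when an encoding is declared) matches PEP 263 coding cookies. A minimal sketch of such a lookup, assuming a cookie-style declaration; the helper name find_coding_cookie is hypothetical:

import re

# PEP 263: an encoding declaration must appear in a comment on
# line 1 or 2 of the source.
CODING_RE = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)')

def find_coding_cookie(source):
    # Hypothetical stand-in for get_encoding(): return the declared
    # encoding name, or None if the source declares none.
    for line in source.splitlines()[:2]:
        m = CODING_RE.match(line)
        if m:
            return m.group(1)
    return None

print(find_coding_cookie('# -*- coding: latin-1 -*-\nx = 1'))  # latin-1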
Code example #2
def single_covers(title, authors, identifiers, caches, tdir):
    patch_plugins()
    load_caches(caches)
    log = GUILog()
    results = Queue()
    worker = Thread(target=run_download, args=(log, results, Event()),
            kwargs=dict(title=title, authors=authors, identifiers=identifiers))
    worker.daemon = True
    worker.start()
    c = Counter()
    while worker.is_alive():
        try:
            plugin, width, height, fmt, data = results.get(True, 1)
        except Empty:
            continue
        else:
            name = plugin.name
            if plugin.can_get_multiple_covers:
                name += '{%d}'%c[plugin.name]
                c[plugin.name] += 1
            name = '%s,,%s,,%s,,%s.cover'%(name, width, height, fmt)
            with open(os.path.join(tdir, name), 'wb') as f:
                f.write(data)
            os.mkdir(os.path.join(tdir, name+'.done'))

    return log.dump()
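The worker above reports results purely through the filesystem: each cover is written as name,,width,,height,,fmt.cover, followed by an empty .done marker directory that signals the write is complete. A minimal sketch of the consuming side, assuming the parent simply scans tdir; the name collect_covers is hypothetical:

import os

def collect_covers(tdir):
    # Hypothetical parent-side reader for the ',,'-delimited naming
    # scheme used above. Yields (plugin_name, width, height, fmt, path)
    # for every cover whose '.done' marker directory already exists.
    for entry in os.listdir(tdir):
        if not entry.endswith('.cover'):
            continue
        if not os.path.isdir(os.path.join(tdir, entry + '.done')):
            continue  # the worker has not finished writing this one yet
        name, width, height, fmt = entry[:-len('.cover')].split(',,')
        yield name, int(width), int(height), fmt, os.path.join(tdir, entry)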
Code example #3
def single_identify(title, authors, identifiers):
    log = GUILog()
    patch_plugins()
    results = identify(log, Event(), title=title, authors=authors,
            identifiers=identifiers)
    return ([metadata_to_opf(r) for r in results],
            [r.has_cached_cover_url for r in results],
            dump_caches(), log.dump())
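Everything the parent needs comes back in one picklable tuple: an OPF document per result, a parallel list of cached-cover flags, the plugin caches, and the serialized log. A short sketch of the consuming side; the literal arguments are placeholders:

opfs, cover_flags, caches, log_dump = single_identify(
    'A Title', ['An Author'], {'isbn': '9780000000000'})
for opf, has_cached_cover in zip(opfs, cover_flags):
    # opf is an OPF metadata document for one result; has_cached_cover
    # says whether that plugin already has a cover URL cached
    print(len(opf), has_cached_cover)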
Code example #4
    def execute_button_clicked(self):
        try:
            log = GUILog()
            log.outputs.append(QtLogStream())
            log.outputs[1].html_log.connect(self.log_outputted)
            log.outputs.append(ANSIStream())

            if self.fromfile_checkbox.isChecked():
                self.load_file_button_clicked()

            program = self.program.toPlainText()
            encoding = self.ia.get_encoding(program)
            if encoding:
                program = program.encode(encoding)

            self.textBrowser.clear()

            self.ia.execute(program, log)
        except Exception:
            log.exception('Failed to execute macro:')

            lineno = 0
            for tb in traceback.extract_tb(sys.exc_info()[2]):
                if tb[0] == '<string>':
                    lineno = tb[1]
            if lineno > 0:
                self.program.go_to_line(lineno)
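The handler above walks the traceback for frames whose filename is '<string>', which is how Python labels code compiled from a string, and moves the editor to the last matching line. A standalone sketch of that mechanism, using only the standard library:

import sys
import traceback

program = 'x = 1\ny = unknown_name  # NameError on line 2\n'
try:
    exec(compile(program, '<string>', 'exec'), {})
except Exception:
    lineno = 0
    # Frames that come from the exec'd source report '<string>' as
    # their filename; keep the last one so the innermost user-code
    # frame wins.
    for frame in traceback.extract_tb(sys.exc_info()[2]):
        if frame.filename == '<string>':
            lineno = frame.lineno
    print('error at line', lineno)  # error at line 2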
Code example #5
File: worker.py, Project: MarioJC/calibre
def main(do_identify, covers, metadata, ensure_fields, tdir):
    failed_ids = set()
    failed_covers = set()
    all_failed = True
    log = GUILog()
    patch_plugins()

    for book_id, mi in metadata.iteritems():
        mi = OPF(BytesIO(mi), basedir=tdir,
                populate_spine=False).to_book_metadata()
        title, authors, identifiers = mi.title, mi.authors, mi.identifiers
        cdata = None
        log.clear()

        if do_identify:
            results = []
            try:
                results = identify(log, Event(), title=title, authors=authors,
                    identifiers=identifiers)
            except Exception:
                pass
            if results:
                all_failed = False
                mi = merge_result(mi, results[0], ensure_fields=ensure_fields)
                identifiers = mi.identifiers
                if not mi.is_null('rating'):
                    # set_metadata expects a rating out of 10
                    mi.rating *= 2
                with open(os.path.join(tdir, '%d.mi'%book_id), 'wb') as f:
                    f.write(metadata_to_opf(mi, default_lang='und'))
            else:
                log.error('Failed to download metadata for', title)
                failed_ids.add(book_id)

        if covers:
            cdata = download_cover(log, title=title, authors=authors,
                    identifiers=identifiers)
            if cdata is None:
                failed_covers.add(book_id)
            else:
                with open(os.path.join(tdir, '%d.cover'%book_id), 'wb') as f:
                    f.write(cdata[-1])
                all_failed = False

        with open(os.path.join(tdir, '%d.log'%book_id), 'wb') as f:
            f.write(log.plain_text.encode('utf-8'))

    return failed_ids, failed_covers, all_failed
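For each book the worker leaves up to three artifacts in tdir: <id>.mi (the merged OPF metadata), <id>.cover (raw cover bytes) and <id>.log (the UTF-8 log text). A minimal sketch of how a parent might gather them back, assuming it knows the book ids; collect_results is a hypothetical name:

import os

def collect_results(tdir, book_ids):
    # Hypothetical parent-side reader for the per-book files written
    # above. Missing files mean that stage failed for that book.
    out = {}
    for book_id in book_ids:
        rec = {'opf': None, 'cover': None, 'log': ''}
        for key, suffix in (('opf', '.mi'), ('cover', '.cover')):
            path = os.path.join(tdir, '%d%s' % (book_id, suffix))
            if os.path.exists(path):
                with open(path, 'rb') as f:
                    rec[key] = f.read()
        log_path = os.path.join(tdir, '%d.log' % book_id)
        if os.path.exists(log_path):
            with open(log_path, 'rb') as f:
                rec['log'] = f.read().decode('utf-8')
        out[book_id] = rec
    return out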
Code example #6
File: pdf.py, Project: outrera/calibre-extract-isbn
def get_isbn(output_dir, pdf_name, log=None):
    is_running_on_fork = False
    if log is None:
        log = GUILog()
        is_running_on_fork = True
    try:
        # Create the scanner before the page-count check so the final
        # 'return' cannot raise UnboundLocalError when get_page_count()
        # returns None.
        scanner = BookScanner(log)
        total_pages = get_page_count(log, output_dir, pdf_name)
        if total_pages is not None:
            if total_pages <= FRONT_PAGES + BACK_PAGES:
                # No point in doing all the complexity of ranges
                text = call_convert_cmd(log, output_dir, pdf_name)
                scanner.look_for_identifiers_in_text([text])

            else:
                text = call_convert_cmd(log, output_dir, pdf_name, 1,
                                        FRONT_PAGES)
                scanner.look_for_identifiers_in_text([text])
                if not scanner.has_identifier():
                    text = call_convert_cmd(log, output_dir, pdf_name,
                                            total_pages - BACK_PAGES,
                                            total_pages)
                    scanner.look_for_identifiers_in_text([text])
        return scanner.get_isbn_result()
    finally:
        if is_running_on_fork:
            # We need to print our log out so the parent process can re-log it.
            print(log.html)
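When get_isbn runs on a fork it has no handle on the parent's log, so it prints log.html to stdout and lets the parent re-log it. A minimal sketch of the parent side of that relay, assuming the fork is an ordinary subprocess whose only stdout output is the HTML log; child_get_isbn.py is a hypothetical script:

import subprocess
import sys

# Run the fork and capture everything it printed.
proc = subprocess.run([sys.executable, 'child_get_isbn.py'],
                      capture_output=True, text=True)
html_log = proc.stdout
# Feed the captured HTML back into the parent's own log using
# whatever API the host application provides, e.g.:
# parent_log.append_html(html_log)  # hypothetical method
print(html_log)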
Code example #7
def single_covers(title, authors, identifiers, caches, tdir):
    os.chdir(tdir)
    load_caches(caches)
    log = GUILog()
    results = Queue()
    worker = Thread(target=run_download, args=(log, results, Event()),
            kwargs=dict(title=title, authors=authors, identifiers=identifiers))
    worker.daemon = True
    worker.start()
    while worker.is_alive():
        try:
            plugin, width, height, fmt, data = results.get(True, 1)
        except Empty:
            continue
        else:
            name = '%s,,%s,,%s,,%s.cover'%(plugin.name, width, height, fmt)
            with open(name, 'wb') as f:
                f.write(data)
            os.mkdir(name+'.done')

    return log.dump()
Code example #8
File: worker.py, Project: jimman2003/calibre
def main(do_identify, covers, metadata, ensure_fields, tdir):
    failed_ids = set()
    failed_covers = set()
    all_failed = True
    log = GUILog()
    patch_plugins()

    for book_id, mi in iteritems(metadata):
        mi = OPF(BytesIO(mi), basedir=tdir,
                 populate_spine=False).to_book_metadata()
        title, authors, identifiers = mi.title, mi.authors, mi.identifiers
        cdata = None
        log.clear()

        if do_identify:
            results = []
            try:
                results = identify(log,
                                   Event(),
                                   title=title,
                                   authors=authors,
                                   identifiers=identifiers)
            except Exception:
                pass
            if results:
                all_failed = False
                mi = merge_result(mi, results[0], ensure_fields=ensure_fields)
                identifiers = mi.identifiers
                if not mi.is_null('rating'):
                    # set_metadata expects a rating out of 10
                    mi.rating *= 2
                with open(os.path.join(tdir, '%d.mi' % book_id), 'wb') as f:
                    f.write(metadata_to_opf(mi, default_lang='und'))
            else:
                log.error('Failed to download metadata for', title)
                failed_ids.add(book_id)

        if covers:
            cdata = download_cover(log,
                                   title=title,
                                   authors=authors,
                                   identifiers=identifiers)
            if cdata is None:
                failed_covers.add(book_id)
            else:
                with open(os.path.join(tdir, '%d.cover' % book_id), 'wb') as f:
                    f.write(cdata[-1])
                all_failed = False

        with open(os.path.join(tdir, '%d.log' % book_id), 'wb') as f:
            f.write(log.plain_text.encode('utf-8'))

    return failed_ids, failed_covers, all_failed
Code example #9
File: main.py, Project: pombreda/calibre-1
    def start(self):
        t = Thread(target=self.explode)
        t.daemon = True
        self.log = GUILog()
        t.start()
Code example #10
File: threaded_jobs.py, Project: tokot/calibre
class ThreadedJob(BaseJob):
    def __init__(self,
                 type_,
                 description,
                 func,
                 args,
                 kwargs,
                 callback,
                 max_concurrent_count=1,
                 killable=True,
                 log=None):
        '''
        A job that is run in its own thread in the calibre main process

        :param type_: The type of this job (a string). The type is used in
        conjunction with max_concurrent_count to prevent too many jobs of the
        same type from running

        :param description: A user viewable job description

        :param func: The function that actually does the work. This function
        *must* accept at least three keyword arguments: abort, log and
        notifications. abort is an Event object. func should periodically
        check abort.is_set() and if
        it is True, it should stop processing as soon as possible. notifications
        is a Queue. func should put progress notifications into it in the form
        of a tuple (frac, msg). frac is a number between 0 and 1 indicating
        progress and msg is a string describing the progress. log is a Log
        object which func should use for all debugging output. func should
        raise an Exception to indicate failure. This exception is stored in
        job.exception and can thus be used to pass arbitrary information to
        callback.

        :param args,kwargs: These are passed to func when it is called

        :param callback: A callable that is called on completion of this job.
        Note that it is not called if the user kills the job. Check job.failed
        to see if the job succeeded or not. And use job.log to get the job log.

        :param killable: If False the GUI won't let the user kill this job

        :param log: Must be an instance of GUILog (or a subclass) or None. If
        None, a default GUILog is created.
        '''
        BaseJob.__init__(self, description)

        self.type = type_
        self.max_concurrent_count = max_concurrent_count
        self.killable = killable
        self.callback = callback
        self.abort = Event()
        self.exception = None

        kwargs['notifications'] = self.notifications
        kwargs['abort'] = self.abort
        self.log = GUILog() if log is None else log
        kwargs['log'] = self.log

        self.func, self.args, self.kwargs = func, args, kwargs
        self.consolidated_log = None

    def start_work(self):
        self.start_time = time.time()
        self.log('Starting job:', self.description)
        try:
            self.result = self.func(*self.args, **self.kwargs)
        except Exception as e:
            self.exception = e
            self.failed = True
            self.log.exception('Job: "%s" failed with error:' %
                               self.description)
            self.log.debug('Called with args:', self.args, self.kwargs)

        self.duration = time.time() - self.start_time
        try:
            self.callback(self)
        except Exception:
            import traceback
            traceback.print_exc()
        self._cleanup()

    def _cleanup(self):

        try:
            self.consolidate_log()
        except Exception:
            if self.log is not None:
                self.log.exception('Log consolidation failed')

        # No need to keep references to these around anymore
        self.func = self.args = self.kwargs = self.notifications = None
        # We can't delete self.callback as it might be a Dispatch object and if
        # it is garbage collected it won't work

    def kill(self):
        if self.start_time is None:
            self.start_time = time.time()
            self.duration = 0.0001
        else:
            self.duration = time.time() - self.start_time
            self.abort.set()

        self.log('Aborted job:', self.description)
        self.killed = True
        self.failed = True
        self._cleanup()

    def consolidate_log(self):
        logs = [self.log.html, self.log.plain_text]
        bdir = base_dir()
        log_dir = os.path.join(bdir, 'threaded_job_logs')
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        fd, path = tempfile.mkstemp(suffix='.json', prefix='log-', dir=log_dir)
        with os.fdopen(fd, 'wb') as f:
            f.write(
                json.dumps(logs, ensure_ascii=False, indent=2).encode('utf-8'))
        self.consolidated_log = path
        self.log = None

    def read_consolidated_log(self):
        with open(self.consolidated_log, 'rb') as f:
            return json.loads(f.read().decode('utf-8'))

    @property
    def details(self):
        if self.consolidated_log is None:
            return self.log.plain_text
        return self.read_consolidated_log()[1]

    @property
    def html_details(self):
        if self.consolidated_log is None:
            return self.log.html
        return self.read_consolidated_log()[0]
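The docstring above fixes the contract for func: accept abort, log and notifications keyword arguments, poll abort, and report progress as (frac, msg) tuples. A minimal worker honouring that contract might look like this; count_slowly is a hypothetical name, and only the three keyword arguments come from the documented contract:

import time

def count_slowly(n, abort=None, log=None, notifications=None):
    # Conforms to the ThreadedJob contract: poll abort, push
    # (frac, msg) progress tuples, use log for output, and raise
    # to signal failure.
    for i in range(n):
        if abort.is_set():
            log('count_slowly aborted at step', i)
            return i
        notifications.put((i / float(n), 'processed %d of %d' % (i, n)))
        time.sleep(0.1)
    return n

# job = ThreadedJob('count', 'Counting slowly', count_slowly,
#                   (100,), {}, callback=lambda job: None)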