def __init__(self, doc, options):
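    # Record the requested music track and initialize genre/artwork metadata;
    # the shared UpLib data directory is looked up once and cached in the
    # module-level UPLIB_SHARE variable.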
    global UPLIB_SHARE
    DocumentParser.__init__(self, doc, options)
    self.musictrack = options.get("musictrack")
    self.genre = None
    self.artwork = None
    if UPLIB_SHARE is None:
        c = configurator()
        UPLIB_SHARE = c.get("uplib-share")
def main (argv):
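    # Expects a single command-line argument naming an UpLib repository root;
    # the presence of "docs" and "overhead" subdirectories is the sanity check
    # before the repository is opened and its index rebuilt.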

    if len(argv) < 1 or (not os.path.isdir(argv[0])):
        sys.stderr.write("Invalid directory specified.\n")
        sys.exit(1)

    set_verbosity(4)
    files = os.listdir(argv[0])
    if ("docs" in files) and ("overhead" in files):
        from uplib.repository import Repository
        from uplib.plibUtil import configurator, read_metadata

        uplib_version = configurator().get("UPLIB_VERSION")
        r = Repository(uplib_version, argv[0], read_metadata(os.path.join(argv[0], "overhead", "metadata.txt")))

        build_index_1_0(r)
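
# A typical invocation (script name illustrative) would be:
#
#     python rebuild_index.py /path/to/repository
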
def _scan_rss_sites(repo):
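    # Body of the background RSS-scanning thread: periodically re-read the
    # .uplibrc configuration, collect the configured feeds (plus any added at
    # runtime via _ADDED_SITES, minus _REMOVED_SITES), add unseen entries to
    # the repository through the UploadDocument extension, and expire old,
    # never-viewed RSSReader documents.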

    global _ADDED_SITES, _REMOVED_SITES

    try:
        from uplib.plibUtil import configurator, note, write_metadata, id_to_time, create_new_id
        from uplib.extensions import find_and_load_extension
        conf = configurator.default_configurator()

        if repo:
            sys_inits_path = os.path.join(conf.get('uplib-lib'), 'site-extensions')
            repo_inits_path = os.path.join(repo.root(), "overhead", "extensions", "active")
            upload_m = find_and_load_extension("UploadDocument", "%s|%s" % (repo_inits_path, sys_inits_path), None, True)
            if not upload_m:
                note(0, "Can't load UploadDocument extension!")
                sys.exit(1)
            else:
                note("UploadDocument extension is %s", upload_m)

        scan_period = conf.get_int("rss-scan-period", 60 * 2)
        startup_delay = conf.get_int("rss-startup-delay", 0)
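        # defaults: rescan every two minutes, with no delay before the first scan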
        del conf

        import feedparser
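        # feedparser is the third-party Universal Feed Parser module; importing
        # it inside the try block lets a missing install abort this thread with
        # a logged error rather than an unhandled exception.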

        if startup_delay > 0:
            note(3, "startup delay is %d", startup_delay)
            time.sleep(startup_delay)

    except:
        note(0, "RSSReader:  exception starting RSS scan thread:\n%s",
             ''.join(traceback.format_exception(*sys.exc_info())))
        return

    rss_sites = -1
    while True:
        try:
            conf = configurator()       # re-read uplibrc file
            old_rss_sites = rss_sites
            rss_sites = conf.get("rss-sites")
            if old_rss_sites == -1 or (old_rss_sites != rss_sites):
                note(2, "rss_sites are %s", rss_sites)
            scan_period = conf.get_int("rss-scan-period", scan_period)
            expiration_period = conf.get_int("rss-expiration-period", 30 * 24 * 60 * 60)        # 30 days
            if rss_sites:
                rss_sites = rss_sites.split() + _ADDED_SITES
            else:
                rss_sites = _ADDED_SITES[:]
            if rss_sites:
                for site in _REMOVED_SITES:
                    if site in rss_sites:
                        rss_sites.remove(site)
            if rss_sites:
                feeds = []
                for site in rss_sites:
                    if site.startswith("feed:"):
                        feeds.append(feedparser.parse(site))
                    elif site.startswith("http:") or site.startswith("https:"):
                        feeds += find_feeds(site)
                note("feeds are:\n%s", [(x.feed.title, x.href, len(x.entries)) for x in feeds])
                for feed in feeds:
                    note("RSSReader:  %s: %s entries in feed %s", time.ctime(), len(feed.entries), feed.feed.title)
                    for entry in feed.entries:
                        d = process_entry(entry)
                        if not d:
                            continue
                        id = d.get("rss-id")
                        # only consult the index when we actually have a repository
                        hits = repo and repo.do_query('+rss-id:"%s"' % id)
                        if hits:
                            # already in repo
                            continue
                        if repo:
                            response = FakeResponse(repo)
                            mdoutput = StringIO.StringIO()
                            write_metadata(mdoutput, d)
                            md = mdoutput.getvalue()
                            mdoutput.close()
                            upload_m.add(repo, response, { 'URL': d.get("original-url"),
                                                           'wait': "true",
                                                           'no-redirect': "true",
                                                           'metadata': md,
                                                           'md-categories': "RSSReader/%s" % feed.feed.title,
                                                           })
                            if response.thread:
                                while response.thread.isAlive():
                                    response.thread.join(1.0)
                            note("RSSReader:  %s:  %s (%s: %s)", time.ctime(), repr(d.get("title")), response.code, response.message)
                        else:
                            note("RSSReader:  %s:  %s (%s)\n    %s", time.ctime(), repr(d.get("title")), d.get("date"), d.get("summary"))
            # now do expiries
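            # Document ids encode their creation time, so an id generated from
            # (now - expiration_period) serves as the upper bound of the range
            # query: older RSSReader documents not marked _noexpire_ are candidates.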
            old_id = create_new_id(time.time() - expiration_period)[:-5]
            hits = repo.do_query("categories:RSSReader AND id:[00000-00-0000-000 TO %s] AND NOT categories:RSSReader/_noexpire_" % old_id)
            for score, doc in hits:
                # check to see if the user has looked at it
                if os.path.exists(os.path.join(doc.folder(), "activity")):
                    doc.add_category("RSSReader/_noexpire_", True)
                # and if not, remove it
                else:
                    repo.delete_document(doc.id)
            time.sleep(scan_period)
        except KeyboardInterrupt:
            if _IGNORE_KEYBOARD_INTERRUPTS:
                note(0, "RSSReader:  %s", ''.join(traceback.format_exception(*sys.exc_info())))
            else:
                sys.exit(0)                
        except:
            note(0, "RSSReader:  %s", ''.join(traceback.format_exception(*sys.exc_info())))

def after_repository_instantiation(repo):

    # add the ripper

    if FINDIMAGES_PROGRAM:
        rippers = repo.rippers()
        rippers.insert(1, ImageFindingRipper(repo))
    else:
        note("No findimages program found.")

# finally, find the "findimages" program

FINDIMAGES_PROGRAM = configurator().get("findimages")
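# If the configuration doesn't name a findimages binary, fall back to probing
# the PATH with "which"; a csh-style "no findimages in ..." reply means it
# wasn't found.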
try:
    if not FINDIMAGES_PROGRAM:
        FINDIMAGES_PROGRAM = os.popen("which findimages").readline().strip()
        if FINDIMAGES_PROGRAM.startswith("no findimages "):
            FINDIMAGES_PROGRAM=None
except:
    FINDIMAGES_PROGRAM=None

    
if __name__ == "__main__":
    from uplib.plibUtil import set_verbosity
    set_verbosity(4)
    FINDIMAGES_PROGRAM = "./findimages"
    findimages(sys.argv[1], True)
def manipulate_server_internal (repo, params, response=None, ipaddr=None, lgr=None):
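    # Render the IMAP server control panel and carry out the requested
    # 'Start', 'Stop', or 'Restart' action, optionally fronting the server
    # with an stunnel SSL listener.  Normally reached through the UpLib action
    # URL /action/IMAPServer/manipulate_server (the form emitted below).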

    # regular UpLib action

    conf = params.get("configurator")
    if not conf:
        conf = configurator()
    imap_ssl_port = conf.get_int("imap-server-ssl-port", -1)
    imap_localhost_port = conf.get_int("imap-server-localhost-port", 8143)
    stunnel = conf.get("stunnel")
    expunge_deletes_docs = conf.get_bool("imap-expunge-deletes-documents", False)
    global CHECKPOINT_PERIOD
    CHECKPOINT_PERIOD = conf.get_int("imap-server-checkpoint-interval", 600)
    allow_anonymous_readers = ((not repo.has_password) and
                               conf.get_bool("imap-server-allow-anonymous-readers", True))
    use_for_email = conf.get_bool("imap-server-use-for-email", False)

    imap_dir = os.path.join(repo.overhead_folder(), "imap")
    if not os.path.isdir(imap_dir):
        os.mkdir(imap_dir)

    stunnel_pid_filepath = os.path.join(imap_dir, "stunnel.pid")
    if os.path.exists(stunnel_pid_filepath):
        stunnel_pid = int(open(stunnel_pid_filepath, 'r').read().strip())
    else:
        stunnel_pid = None

    # we cache the reference to the existing server in another
    # module so that we can reload this one with impunity
    current_server = emailParser.__dict__.get("IMAP_SERVER")
    note("current server is %s", current_server)

    action = params.get('action')
    newcontext = params.get('newcontext', False)

    if response:
        fp = response.open()
    else:
        fp = StringIO()

    fp.write('<body bgcolor="%s">\n' % STANDARD_BACKGROUND_COLOR)
    if current_server:
        s = current_server.status()
        m = s.more()
        while m:
            fp.write(m)
            m = s.more()
        fp.write('\n<hr>\n')
    else:
        fp.write('<h2>UpLib IMAP Server control panel</h2>\n')

    current_context = None
    if current_server and ((action == 'Stop') or (action == 'Restart')):

        if stunnel_pid:
            try:
                os.kill(stunnel_pid, signal.SIGKILL)
                time.sleep(4)
            except:
                pass
            stunnel_pid = None

        current_context = current_server.mailcontext
        current_server.close()
        current_server = None
        del emailParser.__dict__["IMAP_SERVER"]
        fp.write("<p>Closed current server.\n")

    if os.path.exists(stunnel_pid_filepath):
        os.unlink(stunnel_pid_filepath)

    if (action == 'Start') or (action == 'Restart'):

        cert_filepath = os.path.join(repo.overhead_folder(), repo.certfilename())
        
        try:
            port = params.get("port")
            if port:
                port = int(port)
            else:
                port = imap_localhost_port

            if stunnel and ((not ssl) or (imap_ssl_port > 0)):

                # start stunnel
                stunnel_conf_filepath = os.path.join(imap_dir, "stunnel.conf")
                f = open(stunnel_conf_filepath, 'w')
                f.write("debug = 7\n\ncert = %s\noutput = %s\npid = %s\n\n[imapuplib]\naccept = %s\nconnect = 127.0.0.1:%s\n" %
                        (cert_filepath, os.path.join(imap_dir, "stunnel.log"), stunnel_pid_filepath,
                         str(imap_ssl_port), str(port)))
                f.close()
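                # The generated config looks roughly like this (ports and
                # paths are illustrative, filled in from the settings above):
                #
                #   debug = 7
                #
                #   cert = <repo>/overhead/<certfile>
                #   output = <repo>/overhead/imap/stunnel.log
                #   pid = <repo>/overhead/imap/stunnel.pid
                #
                #   [imapuplib]
                #   accept = 8993
                #   connect = 127.0.0.1:8143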
                status, tsignal, output = subproc("%s %s" % (stunnel, stunnel_conf_filepath))
                note("status from '%s %s' (on %s) is %s, output is <%s>", stunnel, stunnel_conf_filepath, imap_ssl_port, status, output)
                if status != 0:
                    raise RuntimeError("Can't start stunnel with '%s %s'; status is %s, output is %s" % (stunnel, stunnel_conf_filepath, status, output))
                stunnel_pid = int(open(stunnel_pid_filepath, 'r').read().strip())
                note("stunnel_pid is %s", stunnel_pid)

            else:
                stunnel_pid = None

            if newcontext or (not current_context):
                current_context = uplib_mailcontext(repo,
                                                    expunge_deletes_docs=expunge_deletes_docs,
                                                    allow_readers=allow_anonymous_readers,
                                                    use_for_email=use_for_email,
                                                    ip=get_fqdn(),
                                                    server_certificate_file=cert_filepath)
            if current_context.inbox:
                current_context.inbox.rescan()
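            # With stunnel fronting the SSL port, the IMAP server itself binds
            # only to the loopback interface; otherwise it listens on all interfaces.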
            if stunnel_pid is not None:
                ipaddr = '127.0.0.1'
            else:
                ipaddr = '0.0.0.0'

            if not lgr:
                lgr = logger.rotating_file_logger (os.path.join(imap_dir, "imap.log"), "weekly", None, True)
                lgr = logger.unresolving_logger(lgr)

            imaps = imap_server (current_context, ipaddr, port, logger=lgr, stunnel_pid=stunnel_pid)
            emailParser.__dict__["IMAP_SERVER"] = imaps
            current_server = imaps

            hooked = emailParser.__dict__.get("IMAP_SERVER_SHUTDOWN_HOOK")
            if not hooked:
                repo.add_shutdown_hook(lambda x=repo: shutdown_server(x))
                emailParser.__dict__["IMAP_SERVER_SHUTDOWN_HOOK"] = True

            if stunnel_pid:
                fp.write("<p>Started new IMAP4 server for %s on ports %s/%s."
                         % (repr(repo), str(imap_ssl_port), str(port)))
            else:
                fp.write("<p>Started new IMAP4 server for %s on port %s."
                         % (repr(repo), str(port)))
            if current_context.inbox:
                fp.write("<p>Inbox:  %d messages, %d recent, %d unseen."
                         % (len(current_context.inbox.msgs),
                            len(current_context.inbox.recent()),
                            current_context.inbox.min_unseen()))
        except:
            excn_type, excn_value, excn_tb = sys.exc_info()
            s = ''.join(traceback.format_exception(excn_type, excn_value, excn_tb))
            note("Can't establish IMAP server:  exception:  %s", s)
            fp.write(s)

    fp.write('<form method=GET action="/action/IMAPServer/manipulate_server">\n')
    fp.write('<input type=submit name=action value="Start" %s>\n' % ((current_server and "disabled") or ""))
    fp.write('<input type=submit name=action value="Stop" %s>\n' % (((current_server == None) and "disabled") or ""))
    fp.write('<input type=submit name=action value="Restart" %s>\n' % (((current_server == None) and "disabled") or ""))
    fp.write('<input type=checkbox name="newcontext" %s> Use fresh mail context\n' % ((newcontext and "checked") or ""))
    fp.write('</form>\n')
    fp.write('</body>\n')