Example #1
def pingback(src, dest, dryrun=False):
    """Makes a pingback request to dest on behalf of src, i.e. effectively
    saying to dest that "the page at src is linking to you"."""

    def search_link(content):
        match = re.search(b'<link rel="pingback" href="([^"]+)" ?/?>', content)
        return match and match.group(1)

    try:
        r = head(dest)
    except (URLError, HTTPError):
        return

    try:
        server_url = r.info().get('X-Pingback', '') or search_link(r.read(512 * 1024))
        if server_url:

            print("Pingback", blue(urlparse(server_url).netloc), end='')
            print("from", green(''.join(urlparse(src)[1:3])) + ".")

            if not dryrun:
                server = xmlrpc.client.ServerProxy(server_url)
                server.pingback.ping(src, dest)
    except xmlrpc.client.Fault as e:
        log.warn("XML-RPC fault: %d (%s)", e.faultCode, e.faultString)
    except xmlrpc.client.ProtocolError as e:
        raise AcrylamidException(e.args[0])
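
The discovery step above boils down to two checks: the X-Pingback response
header first, then a scan of the first 512 KiB of the body for a
<link rel="pingback"> tag. A minimal self-contained sketch of the same flow,
using only the standard library (urlopen stands in for Acrylamid's head()
helper; the URLs are hypothetical):

import re
import xmlrpc.client
from urllib.request import urlopen

def discover_pingback_server(target):
    """Return the pingback endpoint advertised by target, or None."""
    with urlopen(target) as resp:
        # Prefer the X-Pingback header; fall back to the <link> element.
        server_url = resp.headers.get('X-Pingback')
        if server_url:
            return server_url
        match = re.search(rb'<link rel="pingback" href="([^"]+)" ?/?>',
                          resp.read(512 * 1024))
        return match.group(1).decode('utf-8') if match else None

# Hypothetical URLs, for illustration only.
src, dest = 'https://my.blog/post/', 'https://example.org/article/'
server_url = discover_pingback_server(dest)
if server_url:
    # pingback.ping is the method name defined by the Pingback specification.
    xmlrpc.client.ServerProxy(server_url).pingback.ping(src, dest)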
Example #2
def pingback(src, dest, dryrun=False):
    """Makes a pingback request to dest on behalf of src, i.e. effectively
    saying to dest that "the page at src is linking to you"."""
    def search_link(content):
        match = re.search(b'<link rel="pingback" href="([^"]+)" ?/?>', content)
        return match and match.group(1)

    try:
        r = head(dest)
    except (URLError, HTTPError):
        return

    try:
        server_url = r.info().get('X-Pingback', '') or search_link(
            r.read(512 * 1024))
        if server_url:

            print("Pingback", blue(urlparse(server_url).netloc), end='')
            print("from", green(''.join(urlparse(src)[1:3])) + ".")

            if not dryrun:
                server = xmlrpc.client.ServerProxy(server_url)
                server.pingback.ping(src, dest)

    except xmlrpc.client.ProtocolError as e:
        raise AcrylamidException(e.args[0])
Example #3
def run(conf, env, options):
    """Subcommand: info -- a short overview of a blog."""

    limit = options.max if options.max > 0 else 5
    entrylist = sorted(
        [
            Entry(e, conf)
            for e in utils.filelist(conf["content_dir"], conf.get("entries_ignore", []))
            if utils.istext(e)
        ],
        key=lambda k: k.date,
        reverse=True,
    )

    print()
    print("acrylamid", blue(env["version"]) + ",", end=" ")
    print("cache size:", blue("%0.2f" % (cache.size / 1024.0 ** 2)) + " mb")
    print()

    for entry in entrylist[:limit]:
        print("  ", green(ago(entry.date).ljust(13)), end=" ")
        print(white(entry.title) if entry.draft else entry.title)

    print()
    print("%s published," % blue(len([e for e in entrylist if not e.draft])), end=" ")
    print("%s drafted articles" % blue(len([e for e in entrylist if e.draft])))

    time = localtime(getmtime(join(conf.get("cache_dir", ".cache/"), "info")))
    print("last compilation at %s" % blue(strftime("%d. %B %Y, %H:%M", time)))
Example #4
def do_summary(conf, env, options):

    limit = options.max if options.max > 0 else 5
    entrylist, pages, translations, drafts = readers.load(conf)

    entrylist = sorted(entrylist + translations + drafts,
        key=lambda k: k.date, reverse=True)

    print()
    print('Acrylamid', blue(env['version']) + ',', end=' ')
    print('cache size:', blue('%0.2f' % (cache.size / 1024.0**2)) + ' mb')
    print()

    for entry in entrylist[:limit]:
        print('  ', green(ago(entry.date.replace(tzinfo=None)).ljust(13)), end=' ')
        print(white(entry.title) if entry.draft else normal(entry.title))

    print()
    print('%s published,' % blue(len([e for e in entrylist if not e.draft])), end=' ')
    print('%s drafted articles' % blue(len([e for e in entrylist if e.draft])))

    if not isfile(join(conf.get('cache_dir', '.cache/'), 'info')):
        return

    time = localtime(getmtime(join(conf.get('cache_dir', '.cache/'), 'info')))
    print('last compilation at %s' % blue(u(strftime(u'%d. %B %Y, %H:%M', time))))
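
A side note on entry.date.replace(tzinfo=None): it strips the timezone so the
value can be subtracted from a naive datetime.now() when computing the age.
The ago() helper is Acrylamid-internal; a simplified stand-in that illustrates
the idea (the real implementation may differ):

from datetime import datetime

def ago(date, now=None):
    """Render a naive datetime as a rough age, e.g. '3 days ago'."""
    delta = (now or datetime.now()) - date
    periods = [(delta.days // 365, 'year'), (delta.days // 30, 'month'),
               (delta.days // 7, 'week'), (delta.days, 'day'),
               (delta.seconds // 3600, 'hour'), (delta.seconds // 60, 'minute')]
    for count, unit in periods:
        if count > 0:
            return '%d %s%s ago' % (count, unit, '' if count == 1 else 's')
    return 'just now'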
Example #5
def do_summary(conf, env, options):

    limit = options.max if options.max > 0 else 5
    entrylist, pages, translations, drafts = readers.load(conf)

    entrylist = sorted(entrylist + translations + drafts,
                       key=lambda k: k.date,
                       reverse=True)

    print()
    print('Acrylamid', blue(env['version']) + ',', end=' ')
    print('cache size:', blue('%0.2f' % (cache.size / 1024.0**2)) + ' mb')
    print()

    for entry in entrylist[:limit]:
        print('  ',
              green(ago(entry.date.replace(tzinfo=None)).ljust(13)),
              end=' ')
        print(white(entry.title) if entry.draft else normal(entry.title))

    print()
    print('%s published,' % blue(len([e for e in entrylist if not e.draft])),
          end=' ')
    print('%s drafted articles' % blue(len([e for e in entrylist if e.draft])))

    if not isfile(join(conf.get('cache_dir', '.cache/'), 'info')):
        return

    time = localtime(getmtime(join(conf.get('cache_dir', '.cache/'), 'info')))
    print('last compilation at %s' %
          blue(u(strftime(u'%d. %B %Y, %H:%M', time))))
Example #6
def w3c(paths, conf, warn=False, sleep=0.2):
    """Validate HTML by using the validator.w3.org API.

    :param paths: a list of HTML files we map to our actual domain
    :param conf: configuration
    :param warn: don't handle warnings as success when set
    :param sleep: sleep between requests (be nice to their API)"""

    for path in paths:
        url = path[len(conf['output_dir']) - 1:]

        resp = head("http://validator.w3.org/check?uri=" + \
            helpers.joinurl(conf['www_root'], quote(url)))

        print(helpers.rchop(url, 'index.html'), end=' ')

        if resp.code != 200:
            print(red('not 200 Ok!'))
            continue

        headers = resp.info()
        if headers['x-w3c-validator-status'] == "Abort":
            print(red("Abort"))
        elif headers['x-w3c-validator-status'] == 'Valid':
            if int(headers['x-w3c-validator-warnings']) == 0:
                print(green('Ok'))
            else:
                if warn:
                    print(
                        yellow(headers['x-w3c-validator-warnings'] + ' warns'))
                else:
                    print(green('Ok'))
        else:
            res = headers['x-w3c-validator-errors'] + ' errors, ' + \
                  headers['x-w3c-validator-warnings'] + ' warns'
            print(red(res))

        time.sleep(sleep)
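
The notable detail here is that the legacy validator.w3.org API reports its
verdict entirely through response headers (x-w3c-validator-status, -errors,
-warnings), so no HTML parsing is needed. A stripped-down sketch for a single
absolute URL; the service has since been superseded by the Nu checker at
validator.w3.org/nu, so treat this as illustrative only:

import time
from urllib.parse import quote
from urllib.request import urlopen

def validate(page_url, sleep=0.2):
    """Return (status, errors, warnings) for one page via the legacy API."""
    resp = urlopen('http://validator.w3.org/check?uri=' + quote(page_url, safe=''))
    headers = resp.info()
    time.sleep(sleep)  # be nice to the API
    return (headers.get('x-w3c-validator-status'),
            headers.get('x-w3c-validator-errors'),
            headers.get('x-w3c-validator-warnings'))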
Example #7
def w3c(paths, conf, warn=False, sleep=0.2):
    """Validate HTML by using the validator.w3.org API.

    :param paths: a list of HTML files we map to our actual domain
    :param conf: configuration
    :param warn: don't handle warnings as success when set
    :param sleep: sleep between requests (be nice to their API)"""

    for path in paths:
        url = path[len(conf['output_dir'])-1:]

        resp = head("http://validator.w3.org/check?uri=" + \
            helpers.joinurl(conf['www_root'], quote(url)))

        print helpers.rchop(url, 'index.html'),

        if resp.code != 200:
            print red('not 200 Ok!')
            continue

        headers = resp.info()
        if headers['x-w3c-validator-status'] == "Abort":
            print red("Abort")
        elif headers['x-w3c-validator-status'] == 'Valid':
            if int(headers['x-w3c-validator-warnings']) == 0:
                print green('Ok')
            else:
                if warn:
                    print yellow(headers['x-w3c-validator-warnings'] + ' warns')
                else:
                    print green('Ok')
        else:
            res = headers['x-w3c-validator-errors'] + ' errors, ' + \
                  headers['x-w3c-validator-warnings'] + ' warns'
            print red(res)

        time.sleep(sleep)
Example #8
def do_summary(conf, env, options):

    limit = options.max if options.max > 0 else 5
    entrylist, pages = readers.load(conf)

    print()
    print('acrylamid', blue(env['version']) + ',', end=' ')
    print('cache size:', blue('%0.2f' % (cache.size / 1024.0**2)) + ' mb')
    print()

    for entry in entrylist[:limit]:
        print('  ', green(ago(entry.date.replace(tzinfo=None)).ljust(13)), end=' ')
        print(white(entry.title) if entry.draft else normal(entry.title))

    print()
    print('%s published,' % blue(len([e for e in entrylist if not e.draft])), end=' ')
    print('%s drafted articles' % blue(len([e for e in entrylist if e.draft])))

    time = localtime(getmtime(join(conf.get('cache_dir', '.cache/'), 'info')))
    print('last compilation at %s' % blue(strftime('%d. %B %Y, %H:%M', time)))
Example #9
def run(conf, env, options):
    """Subcommand: info -- a short overview of a blog."""

    limit = options.max if options.max > 0 else 5
    commands.initialize(conf, env)
    entrylist, pages = readers.load(conf)

    print()
    print("acrylamid", blue(env["version"]) + ",", end=" ")
    print("cache size:", blue("%0.2f" % (cache.size / 1024.0 ** 2)) + " mb")
    print()

    for entry in entrylist[:limit]:
        print("  ", green(ago(entry.date.replace(tzinfo=None)).ljust(13)), end=" ")
        print(white(entry.title) if entry.draft else normal(entry.title))

    print()
    print("%s published," % blue(len([e for e in entrylist if not e.draft])), end=" ")
    print("%s drafted articles" % blue(len([e for e in entrylist if e.draft])))

    time = localtime(getmtime(join(conf.get("cache_dir", ".cache/"), "info")))
    print("last compilation at %s" % blue(strftime("%d. %B %Y, %H:%M", time)))
Example #10
def run(conf, env, options):
    """Subcommand: info -- a short overview of a blog."""

    limit = options.max if options.max > 0 else 5
    commands.initialize(conf, env)
    entrylist, pages = readers.load(conf)

    print()
    print('acrylamid', blue(env['version']) + ',', end=' ')
    print('cache size:', blue('%0.2f' % (cache.size / 1024.0**2)) + ' mb')
    print()

    for entry in entrylist[:limit]:
        print('  ', green(ago(entry.date.replace(tzinfo=None)).ljust(13)), end=' ')
        print(white(entry.title) if entry.draft else entry.title)

    print()
    print('%s published,' % blue(len([e for e in entrylist if not e.draft])), end=' ')
    print('%s drafted articles' % blue(len([e for e in entrylist if e.draft])))

    time = localtime(getmtime(join(conf.get('cache_dir', '.cache/'), 'info')))
    print('last compilation at %s' % blue(strftime('%d. %B %Y, %H:%M', time)))
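
Across all variants, cache.size / 1024.0 ** 2 simply converts a byte count to
mebibytes. cache is Acrylamid's internal cache object; a plausible stand-in
for its size attribute would just walk the cache directory:

import os
from os.path import getsize, join

def cache_size(cache_dir='.cache/'):
    """Total size in bytes of all files below cache_dir (stand-in helper)."""
    return sum(getsize(join(root, name))
               for root, _, names in os.walk(cache_dir)
               for name in names)

print('cache size: %0.2f mb' % (cache_size() / 1024.0 ** 2))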