Example #1
def read_cfg(fname):
    """
    Read the config.
    """

    with open(fname, "r") as fobj:
        cfg = yaml.safe_load(fobj)
    cfg = AttrDict(cfg)

    cfg.save_dir = abspath(cfg.save_dir)
    cfg.seenurls_fname = abspath(cfg.seenurls_fname)
    cfg.nitrogen_conf_fname = abspath(cfg.nitrogen_conf_fname)

    return cfg
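
A minimal usage sketch, assuming the snippet's yaml/AttrDict imports are in scope; only the key names come from the attributes read_cfg normalizes, while the paths themselves are placeholders:

# Hypothetical config file; key names taken from read_cfg above, values made up.
sample = """
save_dir: ~/reddit-bg/images
seenurls_fname: ~/reddit-bg/seen_urls.txt
nitrogen_conf_fname: ~/.config/nitrogen/bg-saved.cfg
"""

with open("config.yaml", "w") as fobj:
    fobj.write(sample)

cfg = read_cfg("config.yaml")
print(cfg.save_dir)  # AttrDict allows attribute access; the path comes back absolutized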
Example #2
File: cache.py Project: parantapa/pypb
def diskcache(cachedir="~/pypb_fncache"):
    """
    Return a function which memoizes the result of the original function.
    """

    cachedir = abspath(cachedir)

    def decorator_fn(origfn):  # pylint: disable=missing-docstring

        # If cachedir doesn't exist, create it
        if not os.path.exists(cachedir):
            log.notice("Creating cache folder at {} ...", cachedir)
            os.makedirs(cachedir)

        # Get the function hash
        fhash = fnhash(origfn)

        @wraps(origfn)
        def newfn(*args, **kwargs):
            """
            Return result from cache if possible.
            """

            # Steal some parameters
            force_miss = kwargs.pop("force_miss", False)
            force_before = kwargs.pop("force_before", datetime.utcnow())

            # Compute the function code and argument hash
            runhash = cPickle.dumps((fhash, args, kwargs), -1)
            runhash = hashlib.sha1(runhash).hexdigest()
            fname = "{}/{}.pickle".format(cachedir, runhash)

            # Cache hit
            if os.path.exists(fname) and not force_miss:
                log.info("Cache hit for {} in {} ...", origfn.__name__, origfn.func_code.co_filename)
                with open(fname, "rb") as fobj:
                    ret = cPickle.load(fobj)
                if ret["at"] < force_before:
                    return ret["result"]
                log.info("Cache result too old skipping ...")

            # Cache miss
            log.info("Cache miss for {} in {} ...", origfn.__name__, origfn.func_code.co_filename)
            ret = {
                "at": datetime.utcnow(),
                "func_name": origfn.__name__,
                "func_filename": origfn.func_code.co_filename,
                "func_source": inspect.getsource(origfn),
                "args": args,
                "kwargs": kwargs,
                "runhash": runhash,
                "result": origfn(*args, **kwargs),
            }
            with open(fname, "wb") as fobj:
                cPickle.dump(ret, fobj, -1)
            return ret["result"]

        return newfn

    return decorator_fn
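
A usage sketch of the decorator; the decorated function is hypothetical, and force_miss/force_before are the two keyword arguments newfn pops above:

import time
from datetime import datetime

@diskcache(cachedir="~/pypb_fncache")
def slow_square(x):
    # Hypothetical expensive computation worth memoizing on disk.
    time.sleep(2)
    return x * x

slow_square(10)                   # miss: computed and pickled under the cache dir
slow_square(10)                   # hit: result loaded from the pickle file
slow_square(10, force_miss=True)  # recompute even though a cached entry exists
slow_square(10, force_before=datetime(2020, 1, 1))  # only reuse entries cached before this time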
Example #3
def do_main(statefile):
    """
    Run the actual code.
    """

    try:
        _, cfname = sys.argv # pylint: disable=unbalanced-tuple-unpacking
    except ValueError:
        print("Usage: ./reddit-bg.py <config.yaml>")
        sys.exit(1)
    cfname = abspath(cfname)

    # Load config
    cfg = read_cfg(cfname)

    # Create the save directory if it doesn't exist
    if not isdir(cfg.save_dir):
        log.info("Creating image saving directory {} ...", cfg.save_dir)
        os.makedirs(cfg.save_dir, 0o700)

    while True:
        # Reload config
        cfg = read_cfg(cfname)

        # Try to fetch and set the background for each screen
        for screen, mode in cfg.screens:
            set_background(screen, mode, cfg, statefile)

        # Go to sleep
        log.info("Next update after {} minutes.", cfg.update_interval)
        state_update(SYMB_SLEEPING, statefile)
        time.sleep(cfg.update_interval * 60)
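
For reference, a sketch of the extra fields the loop reads on top of the paths handled by read_cfg; the key names come from the attributes accessed above, and every value is a placeholder:

# Hypothetical YAML fragment for the same config file.
extra = """
update_interval: 30        # minutes to sleep between refreshes
screens:
  - [0, zoom-fill]         # unpacked as (screen, mode) by the loop above
  - [1, centered]
"""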
Example #4
File: dmn.py Project: parantapa/pypb
def daemonize(prefix=None, logdir="~/pypb_dmnlog"):
    """
    Daemonize the process.
    """

    logdir = abspath(logdir)

    # Create the log directory if it doesn't exist
    if not os.path.exists(logdir):
        print "Folder '{}' doesn't exist. Creating ...".format(logdir)
        os.makedirs(logdir)

    # Default prefix is the script name minus the .py suffix
    if prefix is None:
        prefix = sys.argv[0]
        if prefix.endswith(".py"):
            prefix = prefix[:-2]
        # Clean the prefix as it is uses in a filename
        prefix = "".join(c if fnamechar(c) else "_" for c in prefix)
    if prefix[-1] != ".":
        prefix = prefix + "."

    # Add start time to file prefix
    prefix = prefix + datetime.utcnow().strftime(LOGTIMEFMT)

    # Setup context
    dc = daemon.DaemonContext()
    dc.working_directory = "."
    dc.umask = 0o022
    dc.signal_map = dict.fromkeys(STD_EXIT_SIGNALS, exit_signal)

    # Do the redirection
    fobj = tempfile.NamedTemporaryFile(dir=logdir, delete=False,
                                       prefix=prefix, suffix=".log")
    dc.stdout = fobj
    dc.stderr = fobj

    # Print outfile name to follow
    print "STDOUT:", fobj.name
    sys.stdout.flush()

    # Ignore SIGHUP before daemonizing
    # Otherwise the child might get a SIGHUP when daemonizing
    signal.signal(signal.SIGHUP, signal.SIG_IGN)

    # Daemonize
    dc.open()

    # Register the print stats function in daemon
    atexit.register(print_stats)
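
A usage sketch, assuming a script that imports daemonize from this module; the import path, do_work, and the loop are placeholders:

import time
from pypb.dmn import daemonize   # assumed import path, based on the file/project names above

def do_work():
    # Placeholder for whatever the daemon actually does.
    pass

if __name__ == "__main__":
    # Detach into the background; from here on stdout/stderr go to the
    # timestamped log file whose name daemonize printed before forking.
    daemonize(prefix="mydaemon")

    while True:
        do_work()
        time.sleep(60)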
Example #5
from __future__ import division, print_function, unicode_literals

__author__ = "Parantapa Bhattachara <pb [at] parantapa [dot] net>"

import time
import subprocess as sub

from pypb import abspath
from pbapps_common import do_main

MODULE = "git-multi-status"

C_WHITE = "#f8f8f2"
C_RED = "#f92672"

REPO_DIRS = abspath("~/.repo-dirs")

def get_git_status():
    """
    Get git status.
    """

    with open(REPO_DIRS, "r") as fobj:
        lines = fobj.readlines()
    lines = [l.strip() for l in lines]
    lines = [l for l in lines if l]

    cmd = ["git-multi-status"] + lines
    try:
        status = sub.check_output(cmd).strip()
    except sub.CalledProcessError: