def te_st_live_cylce_time_for_issues_with_cache(self):
        username = os.environ['username']
        password = os.environ['password']

        from tempfile import mkdtemp
        cachedir = mkdtemp()
        cache = pyfscache.FSCache(cachedir, days=14)
        yt = KanbanAwareYouTrackConnection('https://tickets.i.gini.net',
                                           username, password, cache)
        print 'connected to [%s]' % yt.baseUrl
        cycle_time_issues = yt.get_cycle_time_issues('Backend', 10)

        print 'found %d issues with cycle times' % len(cycle_time_issues)
        [
            self.assertIsNotNone(cycle_time_issue.cycle_time)
            for cycle_time_issue in cycle_time_issues
        ]
示例#2
0
def make_cache_it(folder="./cache",
                  years=0,
                  months=0,
                  weeks=0,
                  days=10,
                  hours=0,
                  minutes=0,
                  seconds=0):
    """Build and return a pyfscache.FSCache rooted at *folder*.

    The keyword arguments together define the expiry lifetime of cached
    entries (default: 10 days).
    """
    lifetime = dict(years=years,
                    months=months,
                    weeks=weeks,
                    days=days,
                    hours=hours,
                    minutes=minutes,
                    seconds=seconds)
    return pyfscache.FSCache(folder, **lifetime)
示例#3
0
    def __init__(
            self,
            cache_dir=DEFAULT_CACHE_DIR,
            timeout=DEFAULT_CACHE_TIMEOUT
    ):
        """
        Create a new PyStan cache.

        :param str|None cache_dir: the path to the cache directory
        :param timedelta|None timeout: how long until the cache expires;
            ``None`` means cached entries never expire
        """
        # The docstring allows timeout=None, but the original called
        # timeout.total_seconds() unconditionally and crashed on None.
        # Omitting the lifetime argument makes FSCache keep entries
        # indefinitely, which matches the documented None semantics.
        if timeout is None:
            self._cache = pyfscache.FSCache(cache_dir)
        else:
            self._cache = pyfscache.FSCache(
                cache_dir, seconds=timeout.total_seconds()
            )

        @self._cache
        def _stan_model(pystan_version, **kwargs):
            # Build (and thereby compile) the Stan model; the cache key is
            # derived from pystan_version plus the model kwargs.
            return StanModel(**kwargs)

        self._stan_model = _stan_model
示例#4
0
        def wrap_function(target_function):
            """Wrap the target function with filesystem caching."""

            # One on-disk cache per wrapped function; its lifetime comes
            # from the user's preferences.
            memoize = pyfscache.FSCache(cache_path,
                                        hours=PREFS['cache_hours'])

            def bound_wrapper(*args, **kwargs):
                """
                Strip the leading 'self' argument so the cache key is
                built only from the remaining arguments.
                """
                instance = args[0]
                remaining = args[1:]

                @memoize
                def memoized(*inner_args, **inner_kwargs):
                    """Delegate to the real function, caching its result."""
                    return target_function(instance, *inner_args,
                                           **inner_kwargs)

                return memoized(*remaining, **kwargs)

            return bound_wrapper
#!/usr/bin/python
# -*- coding: utf-8 -*-

import pyfscache
import requests
import signal
import socket
import subprocess
import sys
import textwrap
import time

# Define our local caching decorator.
cache = pyfscache.FSCache('cache', minutes=30)


class Dashboard(object):
    """
    Gather data from the Interonlinewebnets and post it to a
    separately running dashboard program via HTTP.
    """
    def __init__(self):
        """Regular constructor: wire process signals to instance methods."""
        # SIGHUP triggers a data refresh; SIGINT closes the dashboard.
        # NOTE(review): the original comment said "Handle INT specially" but
        # SIGHUP is registered first — confirm the refresh/close mapping is
        # intentional.
        signal.signal(signal.SIGHUP, self.refresh)
        signal.signal(signal.SIGINT, self.close)

    def get_fortune(self):
        """Get a `fortune' from the local fortune program."""
        fortune = subprocess.check_output(['fortune', '-s'])
        fortune = textwrap.wrap(fortune.replace('\n', ' '), 60)
示例#6
0
文件: safepass.py 项目: suji2995/Exp
#!/usr/bin/env python
import rspwsafe
import pyfscache

cache = pyfscache.FSCache('.safepass-cache', days=1)


#cache disabled as cache error handling not implemented
#@cache
def get_safepass(project_id, cred_id):
    """Given a PasswordSafe project id and credential id, return a
    (host, login, password) tuple, or None when no matching credential
    exists.
    """
    safe = rspwsafe.PWSafe()
    safe.set_project(project_id)
    json_creds = safe.get_creds()

    # Keep only the credential whose id matches the one requested.
    results = [item['credential'] for item in json_creds
               if item['credential']['id'] == cred_id]

    # EAFP: an empty result list means the credential was not found.
    try:
        cred = results[0]
    except IndexError:
        return None
    return (cred['hostname'], cred['username'], cred['password'])
示例#7
0
    return tsls


@task
def get_horizon_url(tenant, glacierroot=glacier_root):
    """ Print (and return) the horizon url for the specfied tenant """

    tsls = get_tenant_sls(tenant, glacierroot)
    host = urlparse(tsls["OS_AUTH_URL"]).hostname
    horizon_url = "http://%s/horizon" % (host)
    print horizon_url
    return horizon_url


CACHEDIR = basedir + "/deployer_ips.cache"
cache_ip = pyfscache.FSCache(CACHEDIR, days=7)


@cache_ip
def get_cached_deployer_ip(tenant, glacierroot):
    tsls = get_tenant_sls(tenant, glacierroot)
    nc = get_tenant_nova_client(tsls)
    #print repr(nc)
    instances = nc.servers.list()
    print "got deps: "
    for i in instances:
        if "deployer" in i.name.lower():
            print i.name
    dep = [
        i.networks for i in instances
        if "deployer" in i.name.lower() and "mongo" not in i.name
示例#8
0
import moment
try:
    import ujson as json
except ImportError:
    import json

# Script version. It's recommended to increment this with every change, to make
# debugging easier.
VERSION = '1.0.1'
BASE_URL = 'https://api.everhour.com'

# Set up logging.
log = logging.getLogger('{0}[{1}]'.format(os.path.basename(sys.argv[0]),
                                          os.getpid()))

cache = pyfscache.FSCache(tempfile.gettempdir(), hours=1)


def run():
    """Main entry point run by __main__ below. No need to change this usually.
    """
    args = parse_args()
    setup_logging(args)
    config = get_config(args)

    log.debug('Starting process (version %s).', VERSION)
    log.debug('Arguments: %r', args)

    # run the application
    try:
        main(args, config)
示例#9
0
DEBUG = REAL_SETTINGS.getSetting('enable_Debug')
SETTOP = REAL_SETTINGS.getSetting("EnableSettop") == "true"
OS_SET = int(REAL_SETTINGS.getSetting("os"))
ENHANCED_DATA = REAL_SETTINGS.getSetting('EnhancedGuideData') == 'true'

# common cache globals
daily = StorageServer.StorageServer("plugin://script.pseudotv.live/" + "daily",
                                    24)
weekly = StorageServer.StorageServer(
    "plugin://script.pseudotv.live/" + "weekly", 24 * 7)
monthly = StorageServer.StorageServer(
    "plugin://script.pseudotv.live/" + "monthly", ((24 * 7) * 4))

# cache globals: three filesystem caches over the same directory with
# daily / weekly / monthly expiry.
cache_daily = pyfscache.FSCache((os.path.join(LOCK_LOC, 'requests', '')),
                                days=1,
                                hours=0,
                                minutes=0)
cache_weekly = pyfscache.FSCache((os.path.join(LOCK_LOC, 'requests', '')),
                                 days=7,
                                 hours=0,
                                 minutes=0)
cache_monthly = pyfscache.FSCache((os.path.join(LOCK_LOC, 'requests', '')),
                                  days=31,
                                  hours=0,
                                  minutes=0)

# Reuse SETTOP (computed above from the same 'EnableSettop' setting)
# instead of re-reading and re-parsing the setting a second time.
if SETTOP:
    SETTOP_REFRESH = REFRESH_INT[int(REAL_SETTINGS.getSetting('REFRESH_INT'))]
else:
    SETTOP_REFRESH = 72000
示例#10
0
# -*- coding: utf-8 -*-
from config import config
import tweepy
import pyfscache

# Authenticate against the Twitter API with OAuth credentials from config.
auth = tweepy.OAuthHandler(config['consumer_key'], config['consumer_secret'])
auth.set_access_token(config['access_token'], config['access_token_secret'])
api = tweepy.API(auth)

# Cache search results on disk; entries expire after one day.
cache_result = pyfscache.FSCache('cache/tweets', days=1)


@cache_result
def getByHashtags(str):
    items = tweepy.Cursor(api.search, q=str).items(10)
    for item in items:
        print item.text.encode('utf-8')


def arrayify(object):
    """Convert a mapping into a Google-API-style table: a header row
    followed by one [key, value] row per entry."""
    rows = [['Hashtag', 'Number']]
    for key, value in object.items():
        rows.append([key, value])
    return rows


# count hashtags and store in a dict
示例#11
0
    response = Response(res.content, res.status_code, [('Content-Type', res.headers['Content-Type'])])
    cache_it[endpoint] = response
    return response

@app.route("/stats")
def get_stat():
    """Report slow-request and query counters as JSON.

    Reads are done without locking: obsolete stat data is acceptable here,
    so a read/write lock would add complexity for no benefit.
    """
    stats = {
        'slow_requests': conn.hgetall('slow_requests'),
        'queries': conn.hgetall('queries'),
    }
    return jsonify(stats)

if __name__ == '__main__':
    if 'SLOW_THRESH' in os.environ:
        try:
            SLOW_THRESH = float(os.environ['SLOW_THRESH'])
            print "SLOW_THRESH set by config: %.1f seconds" % SLOW_THRESH
        except ValueError as e:
            print "Incompatible env var for SLOW_THRESH, use default value 1 second"

    if 'CACHE_EXPIRE' in os.environ:
        try:
            CACHE_EXPIRE = int(os.environ['CACHE_EXPIRE'])
            print "CACHE_EXPIRE set by config: %d minutes" % CACHE_EXPIRE
        except ValueError as e:
            print "Incompatible env var for CACHE_EXPIER, use default value 5 minutes"

    print "Reverse Proxy Started, Listening at Port 80..."
    cache_it = pyfscache.FSCache('cache', minutes=CACHE_EXPIRE)
    app.run(host='0.0.0.0', port=80, debug=False, threaded=True)
示例#12
0
# CLI options: one or more airport codes and a cache TTL in minutes.
parser.add_argument(
    '--airports',
    dest='airports',
    default=['CYUL'],
    help='List of airports to fetch flights from, space separated',
    nargs='+')
parser.add_argument('--cache-timeout',
                    dest='cache_timeout',
                    default=5,
                    help='cache info for this amount of minutes',
                    type=int)
args = parser.parse_args()
# NOTE(review): args.show_all implies a --show-all option declared on the
# parser earlier in this file (not visible here) — confirm.
show_all = args.show_all
airports = args.airports
fa_page_cache_timeout = args.cache_timeout
# Cache fetched FlightAware pages on disk so repeated runs inside the
# timeout window do not re-hit the site.
fa_page_cache = pyfscache.FSCache('/tmp/get_flights',
                                  minutes=fa_page_cache_timeout)

current_day = date.today().strftime("%a")
fa_base_url = 'http://flightaware.com/live/airport/'
# (phase label, URL template) pairs; %s is the paging offset.
fa_phases = [[
    'ARRIVALS', "/enroute?;offset=%s;order=estimatedarrivaltime;sort=ASC"
], ['DEPARTURE', "/scheduled?;offset=%s;order=filed_departuretime;sort=ASC"]]


def print_flights(fa_phase, flight_info, trigger):
    flight_number = flight_info[0].get_text().encode('utf-8').strip()
    plane_type = flight_info[1].get_text().encode('utf-8')
    plane_name = None

    for i in atd:
        if i['Designator'] == plane_type:
示例#13
0
from sklearn.externals import joblib
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
import cPickle
import pyfscache

cache_it = pyfscache.FSCache('./cache', days=10, hours=12, minutes=30)


class Classifier(object):
    def __init__(self, data_path='../data/'):
        # Directory holding the pickled models/vectorizers that
        # getModels() loads.
        self.data_path = data_path

    @cache_it
    def getModels(self):
        with open(self.data_path + '/categories.pkl', 'rb') as f:
            categories = cPickle.load(f)

        with open(self.data_path + '/category_map.pkl', 'rb') as f:
            category_map = cPickle.load(f)

        with open(self.data_path + '/article_classifier_model.pkl', 'rb') as f:
            clf = cPickle.load(f)

        count_vect = CountVectorizer()
        with open(self.data_path + '/count_vect.pkl', 'rb') as f:
            count_vect = cPickle.load(f)

        tfidf_transformer = TfidfTransformer()
        with open(self.data_path + '/tfidf_transformer.pkl', 'rb') as f:
            tfidf_transformer = cPickle.load(f)
示例#14
0
# -*- coding: utf-8 -*-
from xmltodict import parse
import requests
import pyfscache
from django import template

register = template.Library()
cache_it = pyfscache.FSCache('/tmp/rates.cache', minutes=60)


@register.assignment_tag
def get_cbr_rates():
    """Return the CBR exchange rates, serving from the filesystem cache
    when possible; returns None if the rates cannot be fetched.
    """
    # Fast path: serve from the cache (EAFP — a miss raises KeyError).
    try:
        return cache_it['rates']
    except KeyError:
        pass
    # Best-effort refresh: any fetch/parse failure degrades to None
    # instead of breaking template rendering. Narrowed from a bare
    # `except:` so SystemExit/KeyboardInterrupt are not swallowed.
    try:
        result = get_cbr_rates_cached()
        cache_it['rates'] = result
        return result
    except Exception:
        return None


def get_cbr_rates_cached():
    rates = dict()

    response = requests.get('http://www.cbr.ru/scripts/XML_daily.asp')
    response.encoding = 'cp1251'
    text = response.text.encode('utf-8').replace('windows-1251', 'utf-8')
示例#15
0
from BeautifulSoup import BeautifulSoup

__settings__ = xbmcaddon.Addon(id='plugin.audio.vietmusic')
__language__ = __settings__.getLocalizedString
home = __settings__.getAddonInfo('path')
icon = xbmc.translatePath(os.path.join(home, 'icon.png'))
thumbnails = xbmc.translatePath(os.path.join(home, 'thumbnails\\'))

__video_quality = __settings__.getSetting(
    'video_quality')  #values="240p|360p|480p|720p|1080p"
__mp3_quality = __settings__.getSetting(
    'mp3_quality')  #values="32K|128K|320K|500K|Lossless"

cachePath = xbmc.translatePath(os.path.join(home, 'cache'))
cache = pyfscache.FSCache(cachePath, days=7)

__thumbnails = []


def toast(message, timeout=7000):
    """Show a short XBMC notification popup titled 'VietMovie'."""
    command = u'XBMC.Notification("%s", "%s", %s)' % ('VietMovie', message,
                                                      timeout)
    xbmc.executebuiltin(command.encode("utf-8"))


def messageBox(title='VietMovie', message='message'):
    """Show a blocking OK dialog with the given title and message."""
    xbmcgui.Dialog().ok(str(title), str(message))


def setCache(key, value):
示例#16
0
from nltk.stem.snowball import SnowballStemmer
import wikipedia
from wordnik import swagger, WordApi
from vocabulary.vocabulary import Vocabulary as vb

# Python 2 only: force the default encoding to UTF-8 for the mixed
# unicode/str handling below (reload() restores sys.setdefaultencoding,
# which site.py removes at startup).
if (sys.version_info < (3, 0)):
    reload(sys)
    sys.setdefaultencoding('utf8')

apiUrl = 'http://api.wordnik.com/v4'
# NOTE(review): hard-coded API key committed to source — should be moved
# to an environment variable or config file.
apiKey = 'a1b28252f1c2bd049897a03d4e81e85c5d6dbca71cb8dcac8'
client = swagger.ApiClient(apiKey, apiUrl)
wordApi = WordApi.WordApi(client)
stemmer = SnowballStemmer("english")

# Filesystem cache constructed without any expiry arguments — presumably
# entries never expire; confirm against pyfscache's FSCache docs.
fs_cache = pyfscache.FSCache('data/cache/')
wikipedia.set_rate_limiting(True)

DEBUG = False
PUNC = set(string.punctuation)


def clean_str(string):
    """
    Cleans a str by making it all lower case, removing punctuation, and removing any html

    Args:
        string: the str to clean
    Returns:
        the cleaned string
    """
示例#17
0
def set_cache(cache_dir):
    cache = pyfscache.FSCache(cache_dir)
    expertfinding.entities = cache(expertfinding.entities)