Beispiel #1
0
 def configure(self, config):
     """Register the 'twitter' beaker cache region.

     The expiry is taken from the ckanext-twitter config helper
     ``twitter_hours_between_tweets()``; entries are held in memory.
     NOTE(review): the ``config`` argument is not used in this body.
     """
     cache_regions.update({
         'twitter': {
             'expire':
                 ckanext.twitter.lib.config_helpers
                     .twitter_hours_between_tweets(),
             'type': 'memory',
             'enabled': True,
             'key_length': 250
             }
         })
# API credentials come from the environment.
legiscan_key = os.getenv("LEGISCAN_KEY")
propublica_key = os.getenv("PROPUBLICA_KEY")

# File-backed beaker cache configuration.
cache_opts = {
    "cache.type": "file",
    "cache.data_dir": "/tmp/cache/data",
    "cache.lock_dir": "/tmp/cache/lock",
}

# Register the cache regions used by this module (24 h and 1 h TTLs).
cache_regions.update(
    day_term={"expire": 86400},
    hour_term={"expire": 3600},
)

# Cache manager shared by this module's instances.
cache = CacheManager(**parse_cache_config_options(cache_opts))


class APIManager:
    """Manager that loads pickled caches at startup and registers a shutdown hook.

    NOTE(review): only the constructor is visible in this chunk; ``__on_exit``
    and ``__load_pickle`` are defined elsewhere in the file.
    """

    def __init__(self):
        # Run __on_exit at interpreter shutdown.
        atexit.register(self.__on_exit)

        # Load the bill texts from the persistent cache if it's less than a week old
        self.bill_texts = self.__load_pickle("pickles/billtext_cache.p")
        self.bill_subjects = self.__load_pickle("pickles/billsubject_cache.p")
Beispiel #3
0
except ImportError:
    JIRA = None
    jira_active = False

try:
    import evolution
    from evolution import ecal
except ImportError:
    ecal = None
    evolution = None

# Register the short-term in-memory beaker region used by this module.
cache_regions.update(
    short_term={
        'expire': 60 * 1000,
        'type': 'memory',
        'key_length': 250,
    },
)
logger = logging.getLogger("external")

# Identifiers for the supported external task sources.
SOURCE_NONE = ""
SOURCE_GTG = 'gtg'
SOURCE_EVOLUTION = 'evo'
SOURCE_RT = 'rt'
SOURCE_REDMINE = 'redmine'
SOURCE_JIRA = 'jira'
# Raw string: "\w" / "\d" are invalid escape sequences in a normal string
# literal (DeprecationWarning, SyntaxWarning since Python 3.12).
JIRA_ISSUE_NAME_REGEX = r"^(\w+-\d+): "
ERROR_ADDITIONAL_MESSAGE = '\n\nCheck settings and reopen main window.'
MIN_QUERY_LENGTH = 3
CURRENT_USER_ACTIVITIES_LIMIT = 5
Beispiel #4
0
    def __init__(self, *args, **kwargs):
        """Bootstrap the server: logging, database, redis, caches and handlers.

        Expects ``kwargs["server"]`` to name a section of ``config["Servers"]``.
        """
        self.logger = logging.getLogger("Houdini")

        self.config = config

        # Select this instance's server section from the global config.
        self.serverName = kwargs["server"]
        self.server = self.config["Servers"][self.serverName]

        # Set up logging
        generalLogDirectory = os.path.dirname(
            self.server["Logging"]["General"])
        errorsLogDirectory = os.path.dirname(self.server["Logging"]["Errors"])

        # NOTE(review): os.mkdir creates only the leaf directory and fails if
        # intermediate parents are missing; os.makedirs would be safer here.
        if not os.path.exists(generalLogDirectory):
            os.mkdir(generalLogDirectory)

        if not os.path.exists(errorsLogDirectory):
            os.mkdir(errorsLogDirectory)

        # Rotating general log: 2 MiB per file, 3 backups, UTF-8.
        universalHandler = RotatingFileHandler(
            self.server["Logging"]["General"],
            maxBytes=2097152,
            backupCount=3,
            encoding="utf-8")
        self.logger.addHandler(universalHandler)

        # Log level name comes from config (e.g. "INFO").
        level = logging.getLevelName(self.server["Logging"]["Level"])
        self.logger.setLevel(level)

        # Separate file handler that receives only ERROR and above.
        errorHandler = logging.FileHandler(self.server["Logging"]["Errors"])
        errorHandler.setLevel(logging.ERROR)
        self.logger.addHandler(errorHandler)

        # Build the SQLAlchemy URL: mysql + configured driver/credentials.
        engineString = "mysql+{0}://{1}:{2}@{3}/{4}".format(
            self.config["Database"]["Driver"].lower(),
            self.config["Database"]["Username"],
            self.config["Database"]["Password"],
            self.config["Database"]["Address"],
            self.config["Database"]["Name"])

        # pool_recycle + pool_pre_ping guard against MySQL closing idle
        # connections out from under the pool.
        self.databaseEngine = create_engine(engineString,
                                            pool_recycle=3600,
                                            pool_pre_ping=True)
        self.createSession = sessionmaker(bind=self.databaseEngine)
        self.session = None

        # Read MySQL's wait_timeout so the session can be revalidated before
        # the server drops the idle connection.
        throwawayValue, timeoutInSeconds = \
            self.databaseEngine.execute("SHOW VARIABLES LIKE 'wait_timeout';").first()

        timeoutInSeconds = int(timeoutInSeconds)

        # Revalidate 5 seconds before the timeout would elapse.
        self.sessionValidator = task.LoopingCall(self.validateSession)
        self.sessionValidator.start(timeoutInSeconds - 5)

        # Clear any stale per-server presence keys left in redis.
        self.redis = redis.StrictRedis(host=self.config["Redis"]["Address"],
                                       port=self.config["Redis"]["Port"])
        self.redis.delete("%s.players" % self.serverName)
        self.redis.delete("%s.population" % self.serverName)

        self.players = {}

        self.logger.info("Houdini module initialized")

        self.handlers = {}

        if self.server["World"]:
            # World server: game protocol plus full game-state collections.
            self.protocol = Penguin

            # Cache region TTL is configurable per server.
            cache_regions.update({
                "houdini": {
                    "expire": self.server["CacheExpiry"],
                    "type": "memory"
                }
            })

            self.spawnRooms = (100, 300, 400, 800, 809, 230, 130)

            # Load static game collections.
            self.rooms = retrieveRoomCollection()
            self.items = retrieveItemCollection()
            self.furniture = retrieveFurnitureCollection()
            self.igloos = retrieveIglooCollection()
            self.floors = retrieveFloorCollection()
            self.pins = retrievePinCollection()
            self.stampGroups, self.stamps = retrieveStampsCollection()
            self.cards = retrieveCardCollection()
            self.dance = retrieveDanceCollection()

            self.openIgloos = {}

            createTables(self.config["Tables"], self.rooms)
            createWaddles(self.config["Waddles"], self.rooms)
            self.danceFloor = DanceFloor(self.dance)

            # Run decreaseStats every 30 minutes (per the attribute name,
            # presumably puffle care decay — confirm in decreaseStats).
            self.puffleKiller = task.LoopingCall(decreaseStats, self)
            self.puffleKiller.start(1800)

            self.matchMaker = MatchMaking()

            # Load all handler modules except the login handler.
            self.loadHandlerModules(excludeLoad="Houdini.Handlers.Login.Login")
            self.logger.info("Running world server")
        else:
            # Login server: only the login handler module is loaded.
            self.protocol = Spheniscidae
            self.loadHandlerModules("Houdini.Handlers.Login.Login")
            self.logger.info("Running login server")

        self.plugins = {}
        self.loadPlugins()
Beispiel #5
0
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
__docformat__ = "reStructuredText en"  # docstrings in this module use reST markup
__authors__ = ['"Jiri Machalek" <*****@*****.**>']  # email redacted upstream

import re
import os
import requests
import logging
from beaker.cache import cache_regions, cache_region

# Register the short-term in-memory beaker region (60 s TTL).
cache_regions.update(
    short_term={"expire": 60, "type": "memory", "key_length": 250})

DEFAULT_QUEUE = "General"
""" Default queue used. """

# Raw string: "\d" is an invalid escape sequence in a normal string literal
# (SyntaxWarning since Python 3.12).
TICKET_NAME_REGEX = r"^#(\d+): "
DEFAULT_RT_CATEGORY = "RT"

TIMEOUT = 5  # seconds


class Rt:
    """ :term:`API` for Request Tracker according to
    http://requesttracker.wikia.com/wiki/REST. Interface is based on
    :term:`REST` architecture, which is based on HTTP/1.1 protocol. This module
    is therefore mainly sending and parsing special HTTP messages.
Beispiel #6
0
    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
__docformat__ = "reStructuredText en"  # docstrings in this module use reST markup
__authors__ = ['"Jiri Machalek" <*****@*****.**>']  # email redacted upstream

import re
import os
import requests
import logging
from beaker.cache import cache_regions, cache_region

# Register the short-term in-memory beaker region (60 s TTL).
cache_regions.update({
    'short_term': {'expire': 60, 'type': 'memory', 'key_length': 250},
})

DEFAULT_QUEUE = 'General'
""" Default queue used. """

# Raw string: "\d" is an invalid escape sequence in a normal string literal
# (SyntaxWarning since Python 3.12).
TICKET_NAME_REGEX = r"^#(\d+): "
DEFAULT_RT_CATEGORY = "RT"

TIMEOUT = 5  # seconds


class Rt:
    """ :term:`API` for Request Tracker according to
    http://requesttracker.wikia.com/wiki/REST. Interface is based on
Beispiel #7
0
from htmlentitydefs import name2codepoint


def htmlentitydecode(s):
    """Return *s* with named HTML entities (e.g. "&amp;") replaced by their characters.

    Fix: ``unichr`` does not exist on Python 3 (NameError); fall back to
    ``chr``, which handles all code points there.
    """
    try:
        _chr = unichr  # Python 2: needed for code points above 0xFF
    except NameError:
        _chr = chr  # Python 3: unichr was removed
    return re.sub('&(%s);' % '|'.join(name2codepoint),
                  lambda m: _chr(name2codepoint[m.group(1)]), s)


# Two Google-App-Engine-memcache-backed regions: 1 h and 24 h TTLs.
cache_opts = {
    'cache.regions': 'short_term, long_term',
    'cache.short_term.type': 'ext:googlememcache',
    'cache.short_term.expire': 3600,
    'cache.long_term.type': 'ext:googlememcache',
    'cache.long_term.expire': 86400,
}
_parsed = parse_cache_config_options(cache_opts)
cache_regions.update(**_parsed['cache_regions'])

# OAuth credentials and app URL — initialised to None here; presumably filled
# in by application configuration elsewhere (TODO confirm against callers).
consumer_key = None
consumer_secret = None
app_url = None

# strftime/strptime patterns used by this module.
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
DATETIME_FORMAT_SHORT = "%d/%m %I:%M %p"
# UTC+8 offset — name suggests China Standard Time (confirm).
CSTTZ = timedelta(hours=8)


def default_range():
    """Return the default (since, to) date window: the past week through tomorrow."""
    today = date.today()
    return today - timedelta(days=7), today + timedelta(days=1)
# TODO: add more regions (forever, weekly, monthly, yearly); get_game_list,
# for example, could probably be cached for a substantial length of time
# (yearly, monthly at most). Fine tuning: add invalidation helpers to force
# a refresh.
cache_regions.update({
    name: {
        'expire': expire,
        'type': kind,
        'key_length': 250,
        'data_dir': CACHE_DATA_DIR,
        'lock_dir': CACHE_LOCK_DIR,
    }
    for name, expire, kind in (
        ('five_minutes', 60 * 5, 'memory'),
        ('daily', 60 * 60 * 24, 'file'),
        ('annual', 60 * 60 * 24 * 365, 'file'),
    )
})


class Enum(object):
    '''
Beispiel #9
0
# Module-global flag toggled by enableTestrun().
testrun = False


def enableTestrun():
    """Switch the module into test-run mode by setting the global flag."""
    global testrun
    testrun = True

# Beaker cache regions for wiki lookups. Fix: key_length was the string
# '250' here, while every other region in this file uses the integer 250;
# the string form relies on beaker coercing it, so use an int consistently.
cache_regions.update({
    'mem1h': {          # cache 1 hour in memory, e. g. page ID results
        'expire': 60 * 60,
        'type': 'memory',
        'key_length': 250
    },
    'disk24h': {        # cache 24h on disk, e. g. category title => ID mappings
        'expire': 60 * 60 * 24,
        'type': 'file',
        'data_dir': beakerCacheDir,
        'key_length': 250
    },
    'disklongterm': {
        'expire': 60 * 60 * 24 * 30 * 3,    # 3 months
        'type': 'file',
        'data_dir': beakerCacheDir,
        'key_length': 250
    }
})

# Wiki namespace IDs (this numbering matches MediaWiki's standard
# namespaces — presumably intentional; confirm against the target wiki).
NS_MAIN = 0
NS_TALK = 1
NS_USER = 2
NS_USER_TALK = 3
NS_PROJECT = 4
Beispiel #10
0
# -*- coding: utf-8 -*-
# __author__ = 'Gz'
from get_inflxdb_data import get_position_data
from get_sticek_url import get_url
import json
from beaker.cache import cache_regions, cache_region

# Two in-memory regions: raw data for 60 s, rendered HTML for an hour.
cache_regions.update(
    memory={'expire': 60, 'type': 'memory'},
    html_memory={'expire': 3600, 'type': 'memory'},
)


@cache_region('memory')
def get_map_data():
    inflxdb_data = get_position_data()
    city_data = {}
    city_sticker = {}
    for data in inflxdb_data:
        try:
            position = json.loads(data['position'].replace("'", '"'))
            city_name = position['city_name']
            sticker_id = data['sticker_id']
            latitude = position['latitude']
            longitude = position['longitude']
Beispiel #11
0
    import urllib.request

import deform
import colander
from beaker.cache import (cache_region,
                          cache_regions)

from pyramid.httpexceptions import HTTPFound
from pyramid.view import view_config

from friendfinder.strava import Athlete

# Single in-memory region for Strava lookups (1 h TTL, short keys).
cache_regions.update({
    'short_term': {'expire': 3600, 'type': 'memory', 'key_length': 80},
})

@cache_region('short_term', 'friends')
def get_friends(id):
    st = Athlete(id)
    start_date = datetime.datetime.today()-datetime.timedelta(days=30)

    segment_ids = []
    rider_ids = {}

    try:
        for ride in st.rides(start_date=start_date):
            for segment in ride.segments:
# TODO: add more regions (forever, weekly, monthly, yearly); get_game_list,
# for example, could probably be cached for a substantial length of time
# (yearly, monthly at most). Fine tuning: add invalidation helpers to force
# a refresh.
for _region, _expire, _type in (
    ('five_minutes', 60 * 5, 'memory'),
    ('daily', 60 * 60 * 24, 'file'),
    ('annual', 60 * 60 * 24 * 365, 'file'),
):
    cache_regions[_region] = {
        'expire': _expire,
        'type': _type,
        'key_length': 250,
        'data_dir': CACHE_DATA_DIR,
        'lock_dir': CACHE_LOCK_DIR,
    }

class Enum(object):
    '''
        Simple enum emulation