Ejemplo n.º 1
0
    def get(self):
        """Long-poll endpoint: block until the server-side version advances.

        The client sends its version in the ``Range`` header
        (``"<version>-"``).  If it matches the current server version we
        block on a Redis pub/sub channel until a new version is published,
        then return the current version and guest list.
        """
        range_header = request.headers.get("Range", None)
        if range_header:
            try:
                client_version = int(range_header.split("-")[0])
            except ValueError:
                # Malformed Range header -- reject the request instead of
                # letting a bare except swallow unrelated errors.
                abort(400)

            redis = Redis()
            pubsub = redis.pubsub()
            pubsub.subscribe([version_pubsub_key])
            server_version = int(redis.get(version_key))
            if server_version == client_version:
                # Client is up to date: wait for the next version bump.
                for item in pubsub.listen():
                    if item["type"] == "message":
                        break

        version = int(Redis().get(version_key))
        return {
            "version":  version,
            "guests":   [
                {
                    "user":     prepare_user(user),
                    "came":     prepare_visit(current_visit)["came"],
                }
                # Visits with left=None are guests currently present.
                for current_visit, user in map(lambda visit: (visit, visit.user), db.session.query(GuestVisit).filter_by(left=None))
            ]
        }
Ejemplo n.º 2
0
 def _(*a, **ka):
     """Wrapper: authenticate the request by its ``apikey`` query parameter.

     Looks the key up in Redis; on success calls the wrapped ``func`` with
     the owning user injected as ``user``.  Returns APIKeyNotValidError
     when the key is missing or unknown.
     """
     # Check for a missing key BEFORE hitting Redis: redis-py raises
     # DataError on a None key instead of returning None.
     apikey = request.params.get('apikey')
     if apikey is None:
         return APIKeyNotValidError()
     redis = Redis(**config.REDIS_INFO)
     user = redis.get(apikey)
     if user is None:
         return APIKeyNotValidError()
     return func(user=user.decode(), *a, **ka)
Ejemplo n.º 3
0
def knock():
    """Flask view: rate-limited doorbell.

    Rejects a knock if one happened within the last 120 seconds; otherwise
    records the knocker in Redis, announces the knock on IRC via XML-RPC
    and rings the physical bell.
    """
    db = Redis()
    if db.get('lastknock'):
        return render_template('toosoon.html')

    if request.method == 'POST':
        if request.form['person']:
            person = request.form['person']
        else:
            person = '<anonymous>'

        db.setex('lastknock', person, 120) # 120 seconds timeout

        try:
            ircText = '!! %s knocked at the door from %s. Please open.' % (person, request.remote_addr)
            s = xmlrpclib.ServerProxy('http://localhost:8000')
            retCode = s.say('#oslohackerspace', ircText)
            t = xmlrpclib.ServerProxy('http://localhost:8101')
            ret2 = t.ring()
        except Exception:
            # Best-effort notification: report failure without crashing the
            # view (was a bare except, which also swallowed SystemExit).
            retcode = "epic fail!!! :D"
        else:
            retcode = "epic success! :-]"
        return render_template('knocked.html', knockresponse=retcode)

    else:
        return render_template('index.html')
Ejemplo n.º 4
0
 def stop(self):
     """Publish a 'stop' command on the trade channel of the active environment."""
     env = C.get('env')
     connection = Redis(
         host=C.get('rds_host_' + env),
         port=6379,
         db=C.get('rds_db_' + env),
     )
     connection.publish(C.get('channel_trade'), 'stop')
Ejemplo n.º 5
0
 def _get_apikey(self):
     """Reverse lookup: return the API key owned by ``self.name``, or None."""
     connection = Redis(**config.REDIS_INFO)
     matches = (
         key for key in connection.keys('*')
         if connection.get(key).decode() == self.name
     )
     return next(matches, None)
Ejemplo n.º 6
0
Archivo: work.py Proyecto: Answeror/aip
def nonblock_call(f, args=None, kargs=None, timeout=None, bound='cpu', group=None):
    """Run ``f(*args, **kargs)`` without blocking the caller.

    Without a ``group`` the call is dispatched to a CPU- or IO-bound
    implementation according to ``bound``.  With a ``group`` the task is
    pickled and queued in Redis for the group's app task; grouped tasks
    are always treated as IO-bound and must have a timeout.
    """
    # Avoid the shared-mutable-default pitfall (args=[], kargs={}).
    args = [] if args is None else args
    kargs = {} if kargs is None else kargs

    if group is None:
        impl = {
            'cpu': cpu_bound_nonblock_call,
            'io': io_bound_nonblock_call,
        }.get(bound)
        assert impl, 'unknown bound type: %s' % bound
        return impl(f, args, kargs, timeout)

    if bound == 'cpu':
        # Grouped tasks run on the IO path regardless of the requested bound.
        log.warning(
            'task assigned to group "{}", bound type fall back to "io"',
            group
        )
    assert timeout is not None, 'group task must have timeout setting'

    from .local import core
    from flask import current_app
    from redis import Redis
    redis = Redis()
    run_group_app_task(
        redis,
        group_app_task_lock(group),
        group,
        current_app.kargs,
        timeout
    )
    import pickle
    redis.rpush(
        ':'.join([core.group_app_task_key, group]),
        pickle.dumps((f, args, kargs))
    )
Ejemplo n.º 7
0
class Subscriber(object):
    """Bridges Redis pub/sub game messages to eventlet Events.

    NOTE(review): ``subscribe``/``listen``/``subscribed`` are called
    directly on the client object -- this matches the pre-2.x redis-py
    API; modern redis-py requires a separate PubSub object.  Confirm the
    pinned redis version.
    """

    def __init__(self):
        # settings.REDIS is expected to be a dict of Redis() kwargs.
        self.redis = Redis(**settings.REDIS)
        # game_id -> eventlet Event fired when a message for that game arrives.
        self.events = {}

    def wait(self, game_id):
        """Block until a pub/sub message for ``game_id`` is received."""
        # Lazily start the single background listener on first use.
        if not self.events:
            eventlet.spawn_n(self._listen)

        if game_id not in self.events:
            self.events[game_id] = eventlet.event.Event()
            self.redis.subscribe('scrabble.%d' % game_id)

        # XXX we need to clean up events, otherwise the size of this dict will
        # be unbounded.
        return self.events[game_id].wait()

    def _listen(self):
        """Background loop: fan incoming messages out to per-game events."""
        # We need to be subscribed to something before redis.listen will work.
        while not self.redis.subscribed:
            eventlet.sleep(1)

        for msg in self.redis.listen():
            if msg['type'] == 'message':
                # Channel names look like 'scrabble.<game_id>'.
                _, game_id = msg['channel'].split('.')
                game_id = int(game_id)
                msg_type, player_num, msg = msg['data'].split(':', 2)
                # Wake current waiters, then re-arm the event for the next message.
                self.events[game_id].send()
                self.events[game_id].reset()
Ejemplo n.º 8
0
    def get_samplelist(self):
        """Populate ``self.samplelist``, preferring a Redis cache.

        Falls back to parsing the group's .fam (plink) or .geno file on a
        cache miss, then stores the parsed list back in Redis with a
        5-minute TTL.  NOTE(review): ``Redis`` is used here like a shared
        client instance (``Redis.get``/``Redis.set``) -- confirm it is a
        module-level connection object, not the redis-py class.
        """
        result = None
        key = "samplelist:v2:" + self.name
        if USE_REDIS:
            result = Redis.get(key)

        if result is not None:
            #logger.debug("Sample List Cache hit!!!")
            #logger.debug("Before unjsonifying {}: {}".format(type(result), result))
            self.samplelist = json.loads(result)
            #logger.debug("  type: ", type(self.samplelist))
            #logger.debug("  self.samplelist: ", self.samplelist)
        else:
            logger.debug("Cache not hit")

            # Mapping (.fam) files take precedence over genotype (.geno) files.
            genotype_fn = locate_ignore_error(self.name+".geno",'genotype')
            mapping_fn = locate_ignore_error(self.name+".fam",'mapping')
            if mapping_fn:
                self.samplelist = get_group_samplelists.get_samplelist("plink", mapping_fn)
            elif genotype_fn:
                self.samplelist = get_group_samplelists.get_samplelist("geno", genotype_fn)
            else:
                self.samplelist = None
            logger.debug("Sample list: ",self.samplelist)
            if USE_REDIS:
                # Cache the freshly parsed list for five minutes.
                Redis.set(key, json.dumps(self.samplelist))
                Redis.expire(key, 60*5)
Ejemplo n.º 9
0
Archivo: work.py Proyecto: Answeror/aip
def group_app_task_out(lock, name, appops, timeout):
    """Run the group app task, always releasing ``lock`` in Redis afterwards."""
    from redis import Redis

    connection = Redis()
    try:
        group_app_task(connection, lock, name, appops, timeout)
    finally:
        # The lock key must not outlive the task, even on failure.
        connection.delete(lock)
Ejemplo n.º 10
0
class Database:
    """Thin wrapper around a redis-py client with encodium-aware helpers."""

    def __init__(self, db_num=0):
        self.redis = Redis(db=db_num)

    @staticmethod
    def concat(*args):
        """Join the string forms of ``args`` with dots, e.g. ('a', 1) -> 'a.1'."""
        return '.'.join(str(piece) for piece in args)

    def get_blocks(self, blocks):
        '''
        :param blocks: list of hashes
        :return: list of Block objects (encodium)
        '''
        fetched = []
        for block_hash in blocks:
            raw = self.redis.get(str(block_hash))
            fetched.append(SimpleBlock.from_json(raw.decode()))
        return fetched

    def set_kv(self, key, value):
        """Store ``value`` (serialized if it is an encodium object) under ``key``."""
        return self.redis.set(key, serialize_if_encodium(value))

    def get_kv(self, key, optional_type=None):
        """Fetch ``key``; optionally coerce the raw bytes to ``optional_type``."""
        result = self.redis.get(key)
        print('getting', key, result)
        if result is not None and optional_type is not None:
            if issubclass(optional_type, Encodium):
                return optional_type.from_json(result.decode())
            return optional_type(result)
        return result
Ejemplo n.º 11
0
def create_datasets_list():
    """Build the list of all DataSet objects, cached in Redis for one hour.

    Returns the unpickled cached list on a cache hit; otherwise queries the
    *Freeze tables for every dataset type, builds the datasets and stores
    the pickled result back in Redis.
    """
    # Initialize so the cache-miss branch works when USE_REDIS is falsy
    # (previously `result` was never bound and L232 raised NameError).
    result = None
    datasets = None
    if USE_REDIS:
        key = "all_datasets"
        result = Redis.get(key)

        if result:
            logger.debug("Redis cache hit")
            datasets = pickle.loads(result)

    if result is None:
        datasets = list()
        with Bench("Creating DataSets object"):
            type_dict = {'Publish': 'PublishFreeze',
                         'ProbeSet': 'ProbeSetFreeze',
                         'Geno': 'GenoFreeze'}

            for dataset_type in type_dict:
                query = "SELECT Name FROM {}".format(type_dict[dataset_type])
                for result in fetchall(query):
                    #The query at the beginning of this function isn't
                    #necessary here, but still would rather just reuse
                    #it logger.debug("type: {}\tname:
                    #{}".format(dataset_type, result.Name))
                    dataset = create_dataset(result.Name, dataset_type)
                    datasets.append(dataset)

        if USE_REDIS:
            Redis.set(key, pickle.dumps(datasets, pickle.HIGHEST_PROTOCOL))
            Redis.expire(key, 60*60)

    return datasets
Ejemplo n.º 12
0
def redis(request):
    """Pytest fixture: a flushed Redis client that clears the test queue on teardown."""
    client = Redis()
    client.flushdb()

    def _cleanup():
        client.delete(TEST_QUEUE_NAME)

    request.addfinalizer(_cleanup)
    return client
Ejemplo n.º 13
0
def send_mail_all():
    """Drain the 'group_level_set' queue and send one digest mail per group/level.

    Pops every pending "group_level" pair from Redis, resolves its
    recipient list, collects the queued log entries for that pair and
    mails them as a single rendered body.

    NOTE(review): assumes the Redis client returns str (decode_responses);
    with a default client ``spop``/``lpop`` return bytes and
    ``split('_')`` would fail on Python 3 -- confirm the client config.
    """
    r = Redis()
    group_levels = []
    while True:
        group_level = r.spop('group_level_set')
        if group_level is None:
            break
        group_levels.append(group_level)
    print('{0} are to be processed.'.format(group_levels))
    for g_l in group_levels:
        # we do send mail here by group/level pair
        group, level = g_l.split('_')
        mailto_list = get_sendlist_by_group_level(int(group), int(level))
        # if nobody wants this log
        if len(mailto_list) == 0:
            continue
        print('group:{0} level:{1} mailto:{2} is to be sent!'.format(group, level, mailto_list))
        content_list = []
        while True:
            content = r.lpop(g_l)
            if content is None:
                break
            content_list.append(content)
        body = render_body(content_list)
        SM.SendMail(content=body, mailto=mailto_list)
Ejemplo n.º 14
0
def crawl_tweets_for_event(event_id):
    """Search Twitter for tweets about one festival event (Python 2 code).

    Reads the event title and artist mapping from Redis, normalizes the
    title (accents, HTML entities, stopwords) and runs a tweet search per
    artist and for the title itself, bumping the per-event crawl counter.
    """
    r = Redis()
    p = HTMLParser()

    total_tweets = 0

    # Normalize the stored title: bytes -> unicode, strip accents,
    # unescape HTML entities, drop stopwords.
    event_title = r.get("festival:%s:title" % event_id).decode("utf-8", errors="ignore")
    event_title = strip_accents(event_title)
    event_title = p.unescape(event_title)
    event_title = remove_stopwords(event_title)

    # NOTE(review): eval() on Redis content below -- only safe if this
    # process also wrote the key; confirm no untrusted writer exists.
    artists = r.get("festival:%s:artists" % event_id)

    for k, v in eval(artists).items():
        if type(v) == list:
            for artist in v:
                print F, "searching tweets for %s %s" % (k, artist)
                total_tweets += search_term(artist)
        elif type(v) == str:
            print F, "searching tweets for %s %s" % (k, v)
            total_tweets += search_term(v)

    r.incr("festival:%s:crawled_times" % event_id)

    print F, "searching tweets for festival title: %s" % event_title
    total_tweets += search_term(event_title, event_id)  # newsid
    print F, "total tweets: %d" % total_tweets
Ejemplo n.º 15
0
class CloudAggregator:
    """
    This class is responsible for querying remote Cloud sites, retrieving their resource and real time XML,
    aggregate and validate the retrieved XML and then finally storing the aggregated XML into a RedisDB
    """
    # Since this is the "primary" class, it is designed to be instantiated first and thus will load the 
    # global ConfigMapping data
    def __init__(self, logger=None):
        # Use the caller-provided logger when given, otherwise create our own.
        if(logger):
            self.logger = logger
        else:
            self.logger = Logger("cloud_aggregator", "cloud_aggregator.log")
        loadConfig(self.logger) 
        
        #Connect to the RedisDB with the configured options
        self.storageDb = Redis(db=ConfigMapping[TARGET_REDIS_DB], host=ConfigMapping[REDISDB_SERVER_HOSTNAME], port=int(ConfigMapping[REDISDB_SERVER_PORT]))

        # Verify the DB is up and running
        # (Python 2 'except ConnectionError, err' syntax -- this module is py2-only.)
        try:
           self.storageDb.ping()
           self.logger.debug("RedisDB server alive")
        except ConnectionError, err:
#            print str(err)
            self.logger.error("redis-server running on desired port? "+str(err))
            sys.exit(RET_CRITICAL)
Ejemplo n.º 16
0
class ScrumApplication(Application):
    """Tornado app: websocket scrum updates fanned out through Redis pub/sub."""

    def __init__(self, **kwargs):
        routes = [
            (r'/socket', SprintHandler),
            (r'/(?P<model>task|sprint|user)/(?P<pk>[0-9]+)', UpdateHandler),
        ]
        super(ScrumApplication, self).__init__(routes, **kwargs)
        self.subscriber = RedisSubscriber(Client())
        self.publisher = Redis()
        # Signing key for websocket auth tokens; overridable via environment.
        self._key = os.environ.get('WEBSOCKET_SECRET', 'VsXVyL2JYzoQ92pa75QdgDRXLvGrJ6FVjxPORm3E')
        self.signer = TimestampSigner(self._key)

    def add_subscriber(self, channel, subscriber):
        """Register ``subscriber`` on its own channel plus the 'all' channel."""
        self.subscriber.subscribe(['all', channel], subscriber)

    def remove_subscriber(self, channel, subscriber):
        """Detach ``subscriber`` from its channel and from 'all'."""
        self.subscriber.unsubscribe(channel, subscriber)
        self.subscriber.unsubscribe('all', subscriber)

    def broadcast(self, message, channel=None, sender=None):
        """Publish ``message`` wrapped in a JSON envelope on ``channel`` (default 'all')."""
        target = channel if channel is not None else 'all'
        envelope = json.dumps({
            'sender': sender and sender.uid,
            'message': message
        })
        self.publisher.publish(target, envelope)
Ejemplo n.º 17
0
    def stream_via_token(cls, token):
        '''
        Set token user to online and publish presence of this user to all
        friends.

        :param token: opaque chat token; Redis maps ``chat:token:<token>``
                      to the integer nereid user id.
        :return: a streaming Response with mimetype ``text/event-stream``.
        Aborts with 404 when the token is unknown.
        '''
        NereidUser = Pool().get('nereid.user')

        # Prefer the app-configured client; fall back to CONFIG values.
        if hasattr(current_app, 'redis_client'):
            redis_client = current_app.redis_client
        else:
            redis_client = Redis(
                CONFIG.get('redis_host', 'localhost'),
                int(CONFIG.get('redis_port', 6379))
            )

        key = 'chat:token:%s' % token
        if not redis_client.exists(key):
            abort(404)

        # The key's value is the user's integer id.
        nereid_user = NereidUser(int(redis_client.get(key)))
        nereid_user.broadcast_presence()

        return Response(
            cls.generate_event_stream(
                nereid_user.id,
                Transaction().cursor.dbname
            ),
            mimetype='text/event-stream'
        )
Ejemplo n.º 18
0
def main():
    """Crawl Zhihu's explore page and cache /question pages in a Redis hash."""
    # Seed page
    base_url = 'https://www.zhihu.com/'
    seed_url = urljoin(base_url, 'explore')
    # Create the Redis client
    client = Redis(host='1.2.3.4', port=6379, password='******')
    # Set a user agent (requests are rejected otherwise)
    headers = {'user-agent': 'Baiduspider'}
    # Send a GET request with the chosen user agent via requests
    resp = requests.get(seed_url, headers=headers)
    # Build a BeautifulSoup object using lxml as the parser
    soup = BeautifulSoup(resp.text, 'lxml')
    href_regex = re.compile(r'^/question')
    # Reduce each URL to a SHA1 digest (fixed length, more compact)
    hasher_proto = sha1()
    # Find all <a> tags whose href starts with /question
    for a_tag in soup.find_all('a', {'href': href_regex}):
        # Get the href attribute and assemble the full URL
        href = a_tag.attrs['href']
        full_url = urljoin(base_url, href)
        # Feed the URL into a copy of the SHA1 hasher prototype
        hasher = hasher_proto.copy()
        hasher.update(full_url.encode('utf-8'))
        field_key = hasher.hexdigest()
        # Fetch and cache the page only if its digest is not yet present
        # in the 'zhihu' hash in Redis
        if not client.hexists('zhihu', field_key):
            html_page = requests.get(full_url, headers=headers).text
            # Pickle and compress the page content
            zipped_page = zlib.compress(pickle.dumps(html_page))
            # Store digest -> compressed page in the hash
            client.hset('zhihu', field_key, zipped_page)
    # Report how many pages are cached in total
    print('Total %d question pages found.' % client.hlen('zhihu'))
Ejemplo n.º 19
0
class TestPyrqClient(unittest.TestCase):
    """Integration tests for PyRqClient against a real Redis-backed queue."""

    def setUp(self):
        self._pyrq_client = client.PyRqClient(QUEUE_NAME)
        self._pyrq_client.container = container = ConfigContainer(client.CONFIG_KEY)
        self._pyrq_client._setup()

        configuration = container.config[client.CONFIG_KEY]
        self._redis_client = Redis(host=configuration['host'], port=configuration['port'], db=configuration['db'],
                                   password=configuration['password'], decode_responses=True)
        self._queue = Queue(QUEUE_NAME, self._redis_client)

    def tearDown(self):
        # Drop the queue key so tests do not leak state between runs.
        self._redis_client.delete(QUEUE_NAME)

    def test_dispatch(self):
        """dispatch() must enqueue a JSON payload carrying method and kwargs."""
        self._pyrq_client.dispatch('test_method', arg1='aaa', arg2=11)
        expected = {
            'method': 'test_method',
            'params': {
                'arg1': 'aaa',
                'arg2': 11
            }
        }
        actual = self._queue.get_items(1)[0]
        # assertEqual: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(expected, json.loads(actual))
        self._queue.ack_item(actual)

    def test_is_empty(self):
        self.assertTrue(self._pyrq_client.is_empty())
        self._pyrq_client.dispatch('whatever')
        self.assertFalse(self._pyrq_client.is_empty())
Ejemplo n.º 20
0
class RedisConnector(object):
    """Robot Framework keyword library exposing basic Redis key/hash operations.

    One shared connection pool serves every instance (GLOBAL library scope).
    NOTE(review): the ``@checker`` decorator is defined elsewhere in this
    module -- presumably it validates or normalizes the returned value;
    confirm before relying on raw return types.
    """
    ROBOT_LIBRARY_SCOPE = 'GLOBAL'

    logger = RedisLogger('RedisConnector')
    # Single pool shared by all instances of this library.
    pool = ConnectionPool(host='localhost', port=6379, db=0, socket_timeout=5)

    def __init__(self):
        self.redis = Redis(connection_pool=RedisConnector.pool)

    def check_key_exists(self, **ka):
        """
        Key existence True/1 or False/0
        """
        return self.redis.exists(name=ka['key'])

    @checker
    def get_key_field(self, **ka):
        """Return the value of field ``ka['field']`` in hash ``ka['key']``."""
        return self.redis.hget(name=ka['key'], key=ka['field'])

    @checker
    def get_key_fields(self, **ka):
        """Return all field/value pairs of hash ``ka['key']``."""
        return self.redis.hgetall(name=ka['key'])

    @checker
    def get_key_ttl(self, **ka):
        """Return the remaining TTL in seconds of key ``ka['key']``."""
        return self.redis.ttl(name=ka['key'])

    def set_key_field(self, **ka):
        """Set field ``ka['field']`` of hash ``ka['key']`` to ``ka['value']``."""
        return self.redis.hset(name=ka['key'], key=ka['field'], value=ka['value'])
Ejemplo n.º 21
0
class spacecraft_listener(object):
  """Listens on the 'astrobot-fromIRC' Redis channel and answers spacecraft
  status commands (!msl, !juno, !falcon, !soyuz) back to IRC (Python 2).
  """

  def __init__(self):
    # NOTE(review): subscribe()/listen() called on the client itself --
    # pre-2.x redis-py API; modern versions need a separate PubSub object.
    self.redis = Redis()
    self.redis.subscribe(['astrobot-fromIRC'])
    
    self.msl    = MSL()
    self.juno   = Juno()
    self.launches = Launches()
    
  def listen(self):
    """Blocking loop: decode IRC messages and dispatch known commands."""
    for blob in self.redis.listen():
      if blob['type'] == "message":
        data    = json.loads(blob['data'])
        print data
        sender  = data['data']['sender']
        channel = data['data']['channel']
        text    = data['data']['message']
        
        # Skip messages from 'ntron' -- presumably the bot's own nick; confirm.
        if sender != 'ntron':
          if "!msl" in text.lower():
            Bot.say(channel, self.msl.respond())
          if "!juno" in text.lower():
            Bot.say(channel, self.juno.respond())
          if "!falcon" in text.lower():
            Bot.say(channel, self.launches.launch("falcon9-cots23"))
          if "!soyuz" in text.lower():
            Bot.say(channel, self.launches.launch("SoyuzTMA-03M"))
Ejemplo n.º 22
0
class RedisQueue(object):
    """Scrapy extension: report spider close events to a Redis list queue."""

    def __init__(self, crawler):
        try:
            from redis import Redis
        except ImportError:
            # The redis package is an optional dependency of this extension.
            raise NotConfigured

        settings = crawler.settings

        # A queue name is mandatory; without it the extension is disabled.
        queue = settings.get("REDIS_QUEUE")
        if queue is None:
            raise NotConfigured

        connection = Redis(
            host=settings.get("REDIS_HOST", "localhost"),
            port=settings.getint("REDIS_PORT", 6379),
            db=settings.getint("REDIS_DB", 0),
            password=settings.get("REDIS_PASSWORD"),
        )

        self.redis = connection
        self.queue = queue
        self.project = settings["BOT_NAME"]

        crawler.signals.connect(self.spider_closed, signal=signals.spider_closed)

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler)

    def spider_closed(self, spider, reason):
        """Push a pickled {project, spider, reason} record onto the queue."""
        record = {"project": self.project, "spider": spider.name, "reason": reason}
        self.redis.rpush(self.queue, pickle.dumps(record))
Ejemplo n.º 23
0
def get_status_message(conn: redis.Redis, uid, timeline='home:', page=1, count=30):
    """Return the statuses on one page of a user's timeline, newest first.

    Fetches the status ids for the requested page from the timeline zset,
    pipelines one HGETALL per id, and drops entries whose hash no longer
    exists (deleted messages come back as empty dicts).
    """
    timeline_key = '%s%s' % (timeline, uid)
    start = (page - 1) * count
    stop = page * count - 1
    status_ids = conn.zrevrange(timeline_key, start, stop)

    pipeline = conn.pipeline(True)
    for status_id in status_ids:
        pipeline.hgetall('status:%s' % status_id)
    # filter(None, ...) drops deleted (empty) messages.
    return filter(None, pipeline.execute())
Ejemplo n.º 24
0
class DatamartApplication(Application):
    """Tornado app routing datamart websockets and publishing via Redis."""

    def __init__(self, **kwargs):
        routes = [
            (r'/socket', DatamartHandler),
            (r'/(?P<model>task|sprint|user)/(?P<pk>[0-9]+)', UpdateHandler),
            (r'/websocket', EchoHandler),
        ]
        super().__init__(routes, **kwargs)
        self.subscriber = RedisSubscriber(Client())
        self.publisher = Redis()
        # Signing key for auth tokens; overridable via environment.
        self._key = os.environ.get('WATERCOOLER_SECRET',
            'pTyz1dzMeVUGrb0Su4QXsP984qTlvQRHpFnnlHuH')
        self.signer = TimestampSigner(self._key)

    def add_subscriber(self, channel, subscriber):
        """Register ``subscriber`` on its channel and the 'all' broadcast channel."""
        self.subscriber.subscribe(['all', channel], subscriber)

    def remove_subscriber(self, channel, subscriber):
        """Detach ``subscriber`` from its channel and from 'all'."""
        self.subscriber.unsubscribe(channel, subscriber)
        self.subscriber.unsubscribe('all', subscriber)

    def broadcast(self, message, channel=None, sender=None):
        """Publish ``message`` as a JSON envelope on ``channel`` (default 'all')."""
        target = channel if channel is not None else 'all'
        payload = json.dumps({
            'sender': sender and sender.uid,
            'message': message
        })
        self.publisher.publish(target, payload)
Ejemplo n.º 25
0
def create_user(conn: redis.Redis, login: str, name):
    # NOTE(review): this block was corrupted by credential scrubbing -- the
    # '******' runs replaced real code (the lock-key construction, the
    # 'users:' existence check, and the id generation via INCR of
    # 'user:id:').  It does NOT parse as-is; restore it from the original
    # source before use.
    llogin = login.lower()
    # Lock so two requests cannot create the same username concurrently
    lock = acquire_lock_with_timeout(conn, 'user:'******'users:', llogin):
        # The login is already registered
        release_lock(conn, 'user:'******'user:id:')
    pipeline = conn.pipeline(True)
    pipeline.hset('users:', llogin, id)
    pipeline.hmset('user:%s:' % id, {
        'login': login,
        'id': id,
        'name': name,
        'followers': 0,
        'following': 0,
        'posts': 0,
        'signup': time.time()
    })
    pipeline.execute()
    release_lock(conn, 'user:' + llogin, lock)
    return id
Ejemplo n.º 26
0
def crawl(url, currentDepth, countUrls):
    """Fetch ``url`` and record the visit in Redis and the crawl DB (Python 2).

    Adds the URL hash to the 'visited' set, pushes the URL onto
    'visitedList' and inserts a document with depth/count metadata.
    Errors are logged and swallowed so one bad page does not stop the crawl.
    """
    redisCon = Redis(host=conf.REDIS_HOST,
                      port=conf.REDIS_PORT,
                      password=conf.REDIS_PASSWD)

    try:
        headers = dict()
        headers[HTTP_HEADER.USER_AGENT] = randomUserAgents()

        response = requests.get(url, timeout=10, headers=headers)
        # crawlMsg = 'crawled %s depth: %d count: %d' % (url, currentDepth, countVisitedUrls)
        # logger.log(CUSTOM_LOGGING.SYSINFO, crawlMsg)
        content = response.text

        # Remember encoding and cookies for later processing of this page.
        kb.pageEncoding = response.encoding
        conf.cookie = str(response.cookies.get_dict())
        hashData = hashUrl(url)
        redisCon.sadd('visited', hashData)
        redisCon.lpush('visitedList', url)
        getDB().insert({'url':url, 'depth': currentDepth, 'count':countUrls})

    except Exception, ex:
        logger.log(CUSTOM_LOGGING.ERROR, ex)
        # print traceback.print_exc()
        return
Ejemplo n.º 27
0
class ScrumApplication(Application):
    """Tornado application bridging websocket updates over Redis pub/sub."""

    def __init__(self, **kwargs):
        routes = [
         (r'/socket', SprintHandler),
         (r'/(?P<model>task|sprint|user)/(?P<pk>[0-9]+)', UpdateHandler),
        ]
        super().__init__(routes, **kwargs)
        self.subscriber = RedisSubscriber(Client())
        self.publisher = Redis()
        self._key = os.environ.get('TORNADO_SECRET',
            'f56A89be7@37714e0!d890z103b^4f6k380b+25')
        self.signer = TimestampSigner(self._key)

    def add_subscriber(self, channel, subscriber):
        """Register ``subscriber`` on its own channel plus the 'all' channel."""
        self.subscriber.subscribe(['all', channel], subscriber)

    def remove_subscriber(self, channel, subscriber):
        """Detach ``subscriber`` from its channel and from 'all'."""
        self.subscriber.unsubscribe(channel, subscriber)
        self.subscriber.unsubscribe('all', subscriber)

    def broadcast(self, message, channel=None, sender=None):
        """Publish ``message`` as a JSON envelope on ``channel`` (default 'all')."""
        channel = 'all' if channel is None else channel
        # Bug fix: the JSON envelope was assigned to a misspelled variable
        # ('messsage') so the raw, un-enveloped message was published.
        message = json.dumps({
            'sender': sender and sender.uid,
            'message': message
            })
        self.publisher.publish(channel, message)
Ejemplo n.º 28
0
def run(architecture_file, model_file, num_batches=10, pattern="centralModel-*"):
    """Evaluate average Q values for every model stored in Redis (Python 2).

    Builds a BaristaNet, fills a replay dataset with freshly generated
    Snake game experience, then evaluates each Redis-stored model whose
    key matches ``pattern`` and prints the per-model average Q value.
    """
    print "evaluating Q values..."
    redisInstance = Redis(host='localhost', port=6379, db=0)
    model_keys = redisInstance.keys(pattern)
    results = {}

    net = BaristaNet(architecture_file, model_file, None)
    replay_dataset = ReplayDataset("temp-q-converge-dset.hdf5",
                                   net.state[0].shape,
                                   dset_size=1000,
                                   overwrite=True)
    net.add_dataset(replay_dataset)

    game = SnakeGame()
    preprocessor = generate_preprocessor(net.state.shape[2:], gray_scale)
    exp_gain = ExpGain(net, ['w', 'a', 's', 'd'], preprocessor, game.cpu_play,
                       replay_dataset, game.encode_state())

    print "Generating new experiences..."
    for _ in xrange(100):
        exp_gain.generate_experience(1e5)
    print "Done"

    for key in model_keys:
        print "Evaluating model:", key
        # Models are stored in Redis as redisC.Dict proxies keyed by name.
        model = dict(redisC.Dict(key=key, redis=redisInstance))
        q_avg = evaluate_model(net, model, num_batches)
        results[key] = q_avg

    for key in sorted(results.keys()):
        print key.ljust(25) + "%0.4f" % results[key]
Ejemplo n.º 29
0
def start_kuaidi_task():
    """Start the courier-tracking loop for the first authorized session (Python 2).

    Looks for 'kuaidi:<sid>:auth' keys in Redis; when one exists, extracts
    the sid segment and starts the polling task for it.
    """
    r = Redis(host=REDIS_HOST, port=REDIS_PORT)
    kuaidi_key = r.keys("kuaidi:*:auth")
    print kuaidi_key
    if kuaidi_key:
        # Key layout is 'kuaidi:<sid>:auth'; take the sid of the first match.
        sid = kuaidi_key[0].split(':')[1]
        loop_kuaidi_task(sid)
Ejemplo n.º 30
0
class Redis:
    """Opps Redis wrapper scoped to one '<dbname>_<prefix>_<sufix>' key."""

    def __init__(self, key_prefix, key_sufix):
        self.key_prefix = key_prefix
        self.key_sufix = key_sufix
        self.host = settings.OPPS_DB_HOST
        self.port = settings.OPPS_DB_PORT
        self.db = 0

        self.conn = RedisClient(
            connection_pool=ConnectionPool(host=self.host,
                                           port=self.port,
                                           db=self.db)
        )

    def object(self):
        """Expose the underlying redis client."""
        return self.conn

    def close(self):
        """Drop the client reference; always reports success."""
        self.conn = None
        return True

    @property
    def key(self):
        """Lower-cased composite key '<dbname>_<prefix>_<sufix>'."""
        return u'{}_{}_{}'.format(settings.OPPS_DB_NAME,
                                  self.key_prefix,
                                  self.key_sufix).lower()

    def save(self, document):
        """SET ``document`` at the composite key."""
        return self.conn.set(self.key, document)

    def publish(self, document):
        """PUBLISH ``document`` on the composite key as channel."""
        return self.conn.publish(self.key, document)

    def get(self):
        """GET the value stored at the composite key."""
        return self.conn.get(self.key)
Ejemplo n.º 31
0
import argparse
import json
import os

from redis import Redis
from redis.exceptions import ConnectionError

if __name__ == "__main__":
    # CLI: optional first/last name, defaulting to "Beyond Creation".
    # (argparse was used without being imported -- fixed via the import block.)
    parser = argparse.ArgumentParser()
    parser.add_argument("first_name", type=str, nargs="?", default="Beyond")
    parser.add_argument("last_name", type=str, nargs="?", default="Creation")

    args = parser.parse_args()

    first_name = args.first_name
    last_name = args.last_name
    environment = os.getenv("ENVIRONMENT", "dev")

    redis = Redis(
        host="redis-service",  # Which host to find the redis-server
        port=4321  # Which port to find the redis-server
    )

    try:
        redis.set("first-name", first_name)
        redis.set("last-name", last_name)
        redis.set("environment", environment)
    except ConnectionError:
        # Report the actual target; the old message claimed redis-server:6379
        # while the client above connects to redis-service:4321.
        print("Not possible to connect to redis-service:4321")
        exit(1)

    exit(0)
Ejemplo n.º 32
0
from redis import Redis
from django.conf import settings

# Module-level singleton: Redis client built from the Django-configured URL.
redis_client = Redis.from_url(settings.REDIS_CON_URI)
Ejemplo n.º 33
0
from flask import Flask, request, jsonify
from redis import Redis
import pickle

app = Flask(__name__)
# Assumes a reachable host named 'redis' (e.g. a docker-compose service name).
redis = Redis(host='redis', port=6379)

@app.route('/', methods=['GET'])
def hello():
 """Hit counter: increment and report the Redis 'hits' key."""
 count = redis.incr('hits')
 return 'Hello World! I have been seen {} times.\n'.format(count)

@app.route('/my_list/', methods=['POST'])
def push_to_redis():
 """Push the request's JSON body, pickled, onto the 'my_list' Redis list.

 NOTE(review): pickling client-supplied JSON stores untrusted data --
 any consumer that unpickles 'my_list' must be trusted; consider
 json.dumps instead.
 """
 data = request.json
 redis.lpush('my_list', pickle.dumps(data))
 return jsonify(data), 201

if __name__ == '__main__':
 app.run(host='0.0.0.0', debug=True)

Ejemplo n.º 34
0
from redis import Redis

# decode_responses=True makes GET return str instead of bytes.
redis_connection = Redis(decode_responses=True)

key = "some-key"
value = 55

# Values are stored as strings; INCR/DECR still work on numeric strings.
redis_connection.set(key, value)
print(redis_connection.get(key))

print(redis_connection.incr(key, 5))

print(redis_connection.decr(key, 20))
def redis_client():
    """Yield a Redis client authenticated from the REDIS_PASSWORD env var.

    Written as a generator so it can serve as a pytest fixture; teardown
    code could be added after the yield.
    """
    import os  # local import: os is not imported at the top of this fragment

    password = os.environ.get('REDIS_PASSWORD')
    r = Redis(host='redis', password=password)
    yield r
Ejemplo n.º 36
0
import json
import rq
from redis import Redis
from rq import Queue

q = Queue(connection=Redis())

#Once we've processed everything into a single JSON, split it up and enqueue them in groups of 20 for easier downloading.

x = open("all.json")
l = list(json.loads(x.readlines()[0]))
n = 20
# Chunk the list into groups of n (Python 2: xrange / print statement).
allList = [l[i:i + n] for i in xrange(0, len(l), n)]
print allList
for listSet in allList:
    # Each chunk becomes one rq job handled by pbldown.downloadArray.
    result = q.enqueue('pbldown.downloadArray', json.dumps(listSet))
Ejemplo n.º 37
0
#!/usr/bin/env python
# coding: utf-8

from flask import Flask
from redis import Redis, RedisError
import os
import socket

# Connect to Redis
# Short timeouts so a missing redis host degrades the page quickly
# instead of hanging the request.
redis = Redis(host="redis", db=0, socket_connect_timeout=2, socket_timeout=2)

app = Flask(__name__)


@app.route("/")
def hello():
    """Visit-counter page; degrades gracefully when Redis is unreachable."""
    try:
        visits = redis.incr("counter")
    except RedisError:
        visits = "<i>cannot connect to Redis, counter disabled</i>"

    html = "<h3>Hello {name}!</h3>" \
           "<b>Hostname:</b> {hostname}<br/>" \
           "<b>Visits:</b> {visits}"
    return html.format(name=os.getenv("NAME", "world"),
                       hostname=socket.gethostname(),
                       visits=visits)


if __name__ == "__main__":
    app.run(host='0.0.0.0', port=80)
Ejemplo n.º 38
0
 def new_dict(self, keyspace=None, *args, **kwargs):
     """Create a RedisDict on a fresh Redis connection (autosync disabled).

     Falls back to ``self.keyspace`` when no keyspace is given.
     """
     effective_keyspace = keyspace or self.keyspace
     return RedisDict(effective_keyspace,
                      Redis(),
                      autosync=False,
                      *args,
                      **kwargs)
Ejemplo n.º 39
0
def get_progress():
    """Return the current job's 'progress' meta value, or 100 when no job exists."""
    connection = Redis.from_url(
        'redis://ec2-54-146-142-160.compute-1.amazonaws.com:6379')
    fulljob = get_current_job(connection=connection,
                              job_class=full_simulator())
    if fulljob is None:
        return 100
    return fulljob.meta.get('progress', 0)
Ejemplo n.º 40
0
def vbox_remote_control(uuid, box):
    """Drive a VirtualBox VM interactively via Redis-shared state.

    Launches the VM headless, then loops while '<vm>_lock' is "False":
    reads queued mouse/keyboard actions from '<vm>_action', injects them
    into the guest, and publishes PNG screenshots to '<vm>_frame'.
    Returns True when the control loop exits cleanly, False on error.
    """
    ret = False
    try:
        queue = Redis.from_url(redis_settings_docker)
        virtual_machine = VirtualBox().find_machine(box["vm"])
        vm_name_lock = "{}_lock".format(box["vm"])
        vm_name_frame = "{}_frame".format(box["vm"])
        vm_name_action = "{}_action".format(box["vm"])
        with virtual_machine.create_session() as session:
            session.unlock_machine()
            proc = virtual_machine.launch_vm_process(session, "headless", "")
            proc.wait_for_completion(timeout=-1)
            with session.console.guest.create_session(box["user"],
                                                      box["pass"]) as gs:
                h, w, _, _, _, _ = session.console.display.get_screen_resolution(
                    0)
                update_item(mongo_settings_docker["worker_db"],
                            mongo_settings_docker["worker_col_logs"], uuid, {
                                "status": "live",
                                "started_time": datetime.now()
                            })
                queue.set(vm_name_lock, "False")
                while queue.get(vm_name_lock) == b"False":
                    x, y, dz, dw, button_state, key = "false", "false", "false", "false", "false", "false"
                    try:
                        t = queue.get(vm_name_action)
                        if t and t != "None":
                            x, y, dz, dw, button_state, key = loads(t)
                            #log_string(uuid,">>>>>>>>>> {} {} {} {} {} {}".format(x,y,dz,dw,button_state,key),"Red")
                    except Exception:
                        # Bug fix: 'except e:' referenced an undefined name and
                        # raised NameError instead of ignoring a bad payload.
                        pass
                    try:
                        if key != "false":
                            session.console.keyboard.put_scancodes(
                                list(scan_code_table[key]))
                        if "false" not in (x, y, dz, dw, button_state):
                            session.console.mouse.put_mouse_event_absolute(
                                x, y, dz, dw, 0)
                            if button_state == "leftclick":
                                session.console.mouse.put_mouse_event_absolute(
                                    x, y, dz, dw, 1)
                                session.console.mouse.put_mouse_event_absolute(
                                    x, y, dz, dw, 0)
                            elif button_state == "leftdoubleclick":
                                session.console.mouse.put_mouse_event_absolute(
                                    x, y, dz, dw, 1)
                                session.console.mouse.put_mouse_event_absolute(
                                    x, y, dz, dw, 0)
                            elif button_state == "rightclick":
                                session.console.mouse.put_mouse_event_absolute(
                                    x, y, dz, dw, 2)
                                session.console.mouse.put_mouse_event_absolute(
                                    x, y, dz, dw, 0)
                        queue.set(vm_name_action, "None")
                        png = session.console.display.take_screen_shot_to_array(
                            0, h, w, BitmapFormat.png)
                        queue.set(vm_name_frame, png)
                    except Exception:
                        # Best-effort: skip this frame on any input/screenshot
                        # error (was a bare except).
                        pass
                    sleep(.2)
                ret = True
            session.console.power_down()
    except Exception as e:
        log_string(uuid, "custom_task Failed {}".format(e), "Red")
    return ret
Ejemplo n.º 41
0
Save information into Redis data store.
"""
import os
import logging

from flask import Flask, json
from flask_cors import CORS
from flask import request
from redis import Redis
from urllib import parse
from werkzeug.exceptions import BadRequest

app = Flask(__name__)
CORS(app)  # enable cross-origin requests with flask_cors defaults (all origins)
# Connection details come from the environment; Redis() accepts the port as a
# string here, but a missing variable raises KeyError at import time.
redis = Redis(host=os.environ['REDIS_HOST'], port=os.environ['REDIS_PORT'])
bind_port = int(os.environ['BIND_PORT'])


def pageview_update(req_form):
    """Increment the per-page/per-client view counter and the global total.

    Uses the module-level ``redis`` client; ``req_form`` must supply the
    ``path`` and ``clientid`` keys.
    """
    counter_key = "page:" + req_form['path'] + ":" + req_form['clientid']
    redis.incr(counter_key)
    # NOTE(review): trailing ':' in the total key looks unintentional — confirm.
    redis.incr("pagetotal:")


def click_update(req_form):
    """Bump the click counter for the requesting client.

    Uses the module-level ``redis`` client; ``req_form`` must supply
    the ``clientid`` key.
    """
    redis.incr("clicks:" + req_form['clientid'])
Ejemplo n.º 42
0
import redis
from redis import Redis
import time
from IPython import embed
from collections import Counter
from random import randint

# Scratch/demo script exercising basic redis-py calls against a local server.
# res = redis.StrictRedis(host='127.0.0.1', port=6379)
res = Redis(host='127.0.0.1', port=6379, db=11)
res.set('name', 'sunjianshi')
print(res.get('name'))

# NOTE(review): 'name' was just SET as a plain string, so zincrby on it should
# fail with WRONGTYPE; zincrby also requires a member argument in redis-py 3.x
# (zincrby(name, amount, value)) — confirm which redis-py version this targets.
res.zincrby('name', 23)
# res.zadd('article_click', 1, 123)
# res.zadd('article_click', 2, 456)
# res.zadd('article_click', 3, 321)
# res.zadd('article_click', 4, 351)
# res.zadd('article_click', 5, 371)
# res.zadd('article_click', 6, 311)
# res.zadd('name',)

# Top 4 members of the 'article_click' sorted set, highest score first.
s = res.zrevrange('article_click', 0, 3, withscores=True)
print(s)
Ejemplo n.º 43
0
 def _post(self, params):
     """Store job-progress counters for a group in two Redis hashes.

     Writes params['num_jobs'] and params['num_complete'] under the
     stringified group id, then returns a confirmation message dict.
     """
     group_id = str(params['gid'])
     client = Redis(host=os.getenv('REDIS_HOST'))
     client.hset('num_jobs', group_id, params['num_jobs'])
     client.hset('num_complete', group_id, params['num_complete'])
     return {'message': f"Progress summary sent for group ID {params['gid']}!"}
Ejemplo n.º 44
0
from redis import Redis
from asyncspider import AsyncSpider
import json
import asyncio

rds = Redis(host='localhost', port=6379, db=0)
spr = AsyncSpider()


async def worker(i):
    import time
    print('worker:', i)
    while True:
        msg = rds.rpop('moxing_msg')
        if not msg:
            time.sleep(1)
            continue
        msg = msg.decode('utf-8')
        argues = msg.split('_')
        fid, page = argues[0], int(argues[1])
        keys = 'moxing_' + fid + '_' + str(page)

        start = time.time()
        print('start:', fid, page)

        res = rds.get(keys)
        if not res:
            infos = await spr.get_page_list(fid=fid,
                                            page=page,
                                            start_page=page,
                                            objs=[])
Ejemplo n.º 45
0
def delete_shot(player_id: int, shot_id: int, redis: Redis = Depends(get_redis)):
    """Remove one shot from a player; 404 if the shot is not registered.

    Returns the refreshed player result after deletion.
    """
    shots_key = f"player:{player_id}:shots"
    if not redis.sismember(shots_key, shot_id):
        raise HTTPException(404)
    schemas.Shot.delete(redis, shot_id)
    redis.srem(shots_key, shot_id)
    return schemas.PlayerResult.find(redis, player_id)
Ejemplo n.º 46
0
 def connect(self) -> Redis:
     """Return a Redis client backed by this instance's shared connection pool."""
     return Redis(connection_pool=self.get_or_create_connection_pool())
Ejemplo n.º 47
0
def find_player(player_id: int, redis: Redis = Depends(get_redis)):
    """Look up a single player by id; 404 if the id is not registered."""
    is_registered = redis.sismember("players", player_id)
    if not is_registered:
        raise HTTPException(404)
    return schemas.PlayerResult.find(redis, player_id)
Ejemplo n.º 48
0
Archivo: util.py Proyecto: xlnrp/tator
def clearStaleProgress(project, ptype):
    """Delete the stale latest-progress Redis key for a project.

    :param project: project identifier interpolated into the key name.
    :param ptype: progress category; must be 'upload', 'algorithm' or
                  'transcode'. Anything else is rejected.
    """
    from redis import Redis
    if ptype not in ['upload', 'algorithm', 'transcode']:
        print("Unknown progress type")
        # Bug fix: previously fell through and issued a delete for an
        # arbitrary '{ptype}_latest_{project}' key despite the warning.
        return

    Redis(host=os.getenv('REDIS_HOST')).delete(f'{ptype}_latest_{project}')
Ejemplo n.º 49
0
def edit_player(
    player_id: int, data: schemas.PlayerCreate, redis: Redis = Depends(get_redis)
):
    """Rename an existing player and return its refreshed result record."""
    name_key = f"player:{player_id}:name"
    redis.set(name_key, data.name)
    return schemas.PlayerResult.find(redis, player_id)
Ejemplo n.º 50
0
def new_player(data: schemas.PlayerCreate, redis: Redis = Depends(get_redis)):
    """Allocate a fresh player id, store its name, register it, and return it."""
    player_id = redis.incr("next_player_id")
    redis.set(f"player:{player_id}:name", data.name)
    redis.sadd("players", player_id)
    return schemas.PlayerResult.find(redis, player_id)
Ejemplo n.º 51
0
 def setup_job_queues(self):
     """Connect to local Redis and build the generate/notify_email queues.

     Stores the connection and both queues on self; returns the generate queue.
     """
     conn = Redis('localhost', 6379)
     self.conn = conn
     self.generate_queue = Queue('generate', connection=conn)
     self.email_queue = Queue('notify_email', connection=conn)
     return self.generate_queue
Ejemplo n.º 52
0
def list_players(redis: Redis = Depends(get_redis)):
    """Return result records for every registered player id."""
    results = []
    for member in redis.smembers("players"):
        results.append(schemas.PlayerResult.find(redis, member))
    return results
Ejemplo n.º 53
0
sys.path.append(JETO_PATH)

from rq import Queue, Worker, Connection
from rq import get_current_job
from rq.decorators import job
from sh import git
import os
import logging
import sh
from sh import ErrorReturnCode, errno
import re
import json
import time
from redis import Redis
import requests
redis_conn = Redis()  # default localhost:6379 connection shared by the worker

from jeto.models.host import Host

basedir = os.path.abspath(os.path.dirname(__file__))
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# NOTE(review): .format(basedir) is a no-op — the string has no placeholder,
# so basedir is never interpolated into the log path.
handler = logging.FileHandler(
    '/var/log/vagrant-worker/debug.log'.format(basedir))
# formatter = logging.Formatter('%(levelname) -10s %(asctime)s\
#    %(module)s:%(lineno)s %(funcName)s %(message)s')

# handler.setFormatter(formatter)
logger.addHandler(handler)
# current_job is a module-level slot; presumably set by job functions at
# runtime — confirm against the rest of the module.
current_job = None
Ejemplo n.º 54
0
def get_tmp_client(*args, **kwargs):
    """Build a short-lived Redis client, forwarding all connection arguments."""
    from redis import Redis as _TmpRedis
    return _TmpRedis(*args, **kwargs)
Ejemplo n.º 55
0
from rq import Queue
from redis import Redis
from flask import Flask, json
from flask_restful import Api, Resource, reqparse, inputs

import sms
import mail
import config
import telegram
import dbdriver as db
from templates import Netwatch, Update


# Load runtime configuration (None selects the module's default profile).
config = config.get(None)

# Single RQ work queue backed by the configured Redis instance.
q = Queue(connection=Redis(config['redis']['host'], config['redis']['port']))

app = Flask(__name__)
api = Api(app)

class RESTDB(Resource):

    def __init__(self):
        self.endpoints = {'host': 'hosts', 'router': 'routers', 'user': '******'}

    def get(self, name):
        result = ''
        if name in self.endpoints:
            job = q.enqueue(db.dump, args=(self.endpoints[name],))
            while job.result is None:
Ejemplo n.º 56
0
from redis import Redis

# Remote Redis instance on non-default port 6378, logical database 1.
rd = Redis(host='121.199.63.71', port=6378, db=1)

if __name__ == '__main__':
    print(rd.keys("*"))
    # WARNING: flushall wipes EVERY database on the server, not just db=1.
    rd.flushall()
Ejemplo n.º 57
0
# '''
# Utility helpers shared across modules.
# '''
import logging
import os
import re

from redis import Redis

from MArtPro import settings

# Declare the shared Redis cache client object
from MArtPro.settings import REDIS_CACHE

redis_cache = Redis(**REDIS_CACHE)


def mvImage(filePath, dstDir):
    '''
    Copy the file at filePath into directory dstDir, keeping the filename.

    NOTE(review): despite the name ("move"), the source file is NOT removed —
    confirm whether true move semantics were intended before changing this.

    :param filePath: path of the source file.
    :param dstDir: destination directory (must already exist).
    :return: None
    '''
    # Split off the filename so it can be reused in the destination path.
    tmpDir, fileName = os.path.split(filePath)

    with open(filePath, 'rb') as rf:
        with open(os.path.join(dstDir, fileName), 'wb') as wf:
            # Stream in fixed-size chunks instead of rf.read(), so large
            # files never have to fit in memory at once.
            while True:
                chunk = rf.read(64 * 1024)
                if not chunk:
                    break
                wf.write(chunk)
Ejemplo n.º 58
0
 def __init__(self):
     # Create a shared connection pool (host/port come from module-level
     # configuration) and bind one Redis client to it.
     self.__pool__ = ConnectionPool(host=host, port=port)
     self._redis_conn = Redis(connection_pool=self.__pool__)
     base_info("Redis连接成功!")  # logs "Redis connected successfully!"
Ejemplo n.º 59
0
import uiautomator2 as u2
import time
import os
import logging
from redis import Redis
from greet import Greet
redis_cli = Redis(host='118.31.66.50')  # remote Redis on the default port

# Root logging configuration for the monitor process.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('Soul Monitor')


class TianGou:
    """
    soul click star
    适配分辨率,如果不同需要该参数
    displayHeight: 2029,
    displayWidth: 1080,
    """
    def __init__(self, type, deviceid):
        """
        初始化鏈接指定的设备
        :param deviceid: 设备 device  ID
        """
        while True:
            try:
                if type == 'usb':
                    self.d = u2.connect_usb(deviceid)
                else:
Ejemplo n.º 60
0
"""
Start Redis
    redis-server
Start RQ (Redis Queue)
    rq worker
python redis_queue_worker.py

Celery Vs RQ
    RQ is designed to be simpler all around.
    Celery is designed to be more robust.
"""

from rq import Queue
from redis import Redis
from redis_queue_test import count_words_at_url
import time

# Tell RQ what Redis connection to use
redis_conn = Redis(host='localhost', port=6379, db=0)
q = Queue(connection=redis_conn)  # no args implies the default queue

# Delay execution of count_words_at_url('http://nvie.com')
job = q.enqueue(count_words_at_url, 'http://google.com')
print(job.result)  # => None

# Now, wait a while, until the worker is finished
# NOTE(review): fixed sleep is racy; polling job.is_finished would be robust.
time.sleep(2)
print(job.result)  # => 311