Code example #1
 def __init__(self, host='localhost', port=6379, password=None, db=0):
     self.client = redis.StrictRedis(host=host,
                                     port=port,
                                     password=password,
                                     db=db)
Code example #2
File: plotcontrol.py Project: XuZikang/eegsynth
parser = argparse.ArgumentParser()
parser.add_argument("-i",
                    "--inifile",
                    default=os.path.join(
                        installed_folder,
                        os.path.splitext(os.path.basename(__file__))[0] +
                        '.ini'),
                    help="optional name of the configuration file")
args = parser.parse_args()

config = configparser.ConfigParser(inline_comment_prefixes=('#', ';'))
config.read(args.inifile)

try:
    r = redis.StrictRedis(host=config.get('redis', 'hostname'),
                          port=config.getint('redis', 'port'),
                          db=0)
    response = r.client_list()
except redis.ConnectionError:
    print("Error: cannot connect to redis server")
    exit()

# combine the patching from the configuration file and Redis
patch = EEGsynth.patch(config, r)

# this determines how much debugging information gets printed
debug = patch.getint('general', 'debug')

input_name, input_variable = list(zip(*config.items('input')))

# count total nr. of curves to be drawn
Code example #3
File: csv_insertion.py Project: cswpy/meetup
#coding=utf-8

import csv
import redis
import json

r = redis.StrictRedis(host='127.0.0.1', port=6379)

with open('./dianping_database_edited.csv', 'rt') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        id = row['OBJECTID']
        data_encoded = json.dumps(row)
        r.set(id, data_encoded)  # write the row into redis
'''
        ID = row['OBJECTID']
        name = row['名字']
        location = row['地区']
        category = row['大类']
        cuisine = row['菜系14']
        cuisine_id = row['FieldID']
        addr = row['地址']
        mean_price = row['人均价格']
        rating = row['人均价格']
        longitude = row['坐标1']
        latitude = row['坐标2']
        taste_score = row['口味分']
        env_score = row['环境分']
        service_score = row['服务分']
'''
Code example #4
File: QueryUserOrder.py Project: lujg/ShunLu
 def __init__(self):
     self.rds = redis.StrictRedis(shunlu_config.redis_ip,
                                  shunlu_config.redis_port)
Code example #5
SLEEP_TIME_IN_SECONDS = 10
NEWS_TIME_OUT_IN_SECONDS = 3600 * 24 * 3

REDIS_HOST = 'localhost'
REDIS_PORT = 6379

SCRAPE_NEWS_TASK_QUEUE_URL = 'localhost'
SCRAPE_NEWS_TASK_QUEUE_NAME = 'tap-news-scrape-news-task-queue'

NEWS_SOURCES = [
    'bbc-news', 'bbc-sport', 'bloomberg', 'cnn', 'entertainment-weekly',
    'espn', 'ign', 'techcrunch', 'the-new-york-times',
    'the-wall-street-journal', 'the-washington-post'
]

redis_client = redis.StrictRedis(REDIS_HOST, REDIS_PORT)
cloudAMQP_client = CloudAMQPClient(SCRAPE_NEWS_TASK_QUEUE_URL,
                                   SCRAPE_NEWS_TASK_QUEUE_NAME)

# Call news_api every 10 seconds to check whether there is any new news.
# The returned news may, of course, already exist locally,
# so Redis is used to deduplicate; only genuinely new news goes onto the queue.
while True:
    news_list = news_api_client.get_news_from_source(NEWS_SOURCES)
    num_of_news_news = 0
    # print(news_list)

    for news in news_list:
        news_digest = hashlib.md5(
            news['title'].encode('utf-8')).digest().encode('base64')
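
The example above is cut off right after the MD5 digest is computed. Following the comments (poll news_api, deduplicate against Redis, queue only genuinely new stories), here is a minimal, self-contained sketch of the deduplication idea; `is_new_news` and the hexdigest-based key are introduced here for illustration and are not the project's verified code.

import hashlib

import redis

REDIS_HOST = 'localhost'
REDIS_PORT = 6379
NEWS_TIME_OUT_IN_SECONDS = 3600 * 24 * 3

redis_client = redis.StrictRedis(REDIS_HOST, REDIS_PORT)


def is_new_news(title):
    """Return True only the first time a title is seen within the timeout."""
    digest = hashlib.md5(title.encode('utf-8')).hexdigest()
    if redis_client.get(digest) is not None:
        return False  # already seen recently, treat as a duplicate
    # remember the digest and let it expire after three days
    redis_client.set(digest, title)
    redis_client.expire(digest, NEWS_TIME_OUT_IN_SECONDS)
    return True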
Code example #6
        print "trouble showing formatted result"
        print json.dumps(result, indent=2)
        print
        raise


def fmt_date(ts):
    the_date = date.fromtimestamp(ts)
    return the_date.strftime("%Y-%m-%d")


def mk_sched_key(op, user, pid):
    return 'sched-' + op + '-' + user + '-' + pid


if __name__ == '__main__':
    import random
    import spotify_auth

    my_redis = redis.StrictRedis(host='localhost', port=6379, db=0)
    my_auth = spotify_auth.SpotifyAuth(r=my_redis)
    my_pm = program_manager.ProgramManager(my_auth, r=my_redis)

    sched = Scheduler(my_redis, my_pm)

    threads = 5
    if len(sys.argv) > 1:
        threads = int(sys.argv[1])

    sched.start_processing_threads(threads)
Code example #7
File: identidock.py Project: martrics/identidock
from flask import Flask, Response, request
import requests
import hashlib
import redis
import html

app = Flask(__name__)
cache = redis.StrictRedis(host='redis', port=6379, db=0)
salt = "UNIQUE_SALT"
default_name = 'Joe Bloggs'


@app.route('/', methods=['GET', 'POST'])
def main_page():
    name = default_name
    if request.method == 'POST':
        name = html.escape(request.form['name'], quote=True)
    salted_name = salt + name
    name_hash = hashlib.sha256(salted_name.encode()).hexdigest()

    header = '<html><head><title>Identidock</title></head><body>'
    body = '''<form method="POST">Hello <input type="text" name="name" value="{0}"> \
			<input type="submit" value="submit">\
			</form>\
			<p>You look like a:\
			<img src="/monster/{1}" />'''.format(name, name_hash)
    footer = '</body></html>'

    return header + body + footer

Code example #8
File: report.py Project: lukehuang/opsweb-1
from flask import Flask, Blueprint, request
from Modules import check, produce, db_op, db_idc, loging, MyForm, tools
from sqlalchemy import func, and_
from pyecharts import Bar, Tree, Pie, Line
import redis
from functools import reduce
from flask_sqlalchemy import SQLAlchemy
import datetime, time
app = Flask(__name__)
app.config.from_pyfile('../conf/redis.conf')
app.config.from_pyfile('../conf/sql.conf')
DB = SQLAlchemy(app)
logging = loging.Error()
redis_host = app.config.get('REDIS_HOST')
redis_port = app.config.get('REDIS_PORT')
redis_password = app.config.get('REDIS_PASSWORD')
RC = redis.StrictRedis(host=redis_host, port=redis_port, decode_responses=True)
redis_data = app.config.get('REDIS_DATA')
RC_CLUSTER = redis.StrictRedis(host=redis_data,
                               port=redis_port,
                               decode_responses=True)
page_report = Blueprint('report', __name__)


@page_report.route('/resource_report')
def resource_report():
    form = MyForm.Form_resource_report()
    days = tools.http_args(request, 'days')
    date = datetime.datetime.now()
    db_server = db_idc.idc_servers
    if days:
        days = int(days)
Code example #9
 def RedisDB(cls, decode_responses=True):
     return (redis.StrictRedis(host="redis",
                               port=6379,
                               db=0,
                               decode_responses=decode_responses))
Code example #10
File: extensions.py Project: ywmmmw/collipa
# coding: utf-8

import re
from collipa import config
# import memcache
import redis
import pickle
from functools import wraps
from collipa.libs.redis_port import RedisPort

# mc = memcache.Client(['127.0.0.1:11211'], debug=1)
rd = redis.StrictRedis(host=config.rd_host, port=config.rd_port, db=0)

mc = RedisPort(rd)


def memcached(key, limit=86400):
    def wrap(func):
        @wraps(func)
        def get_value(*args, **kwargs):
            '''
            TODO: ignored for now, handle later
            value = mc.get(key)
            if not value:
                value = func(*args, **kwargs)
                mc.set(key, value, limit)
            return value
            '''
            return func(*args, **kwargs)

        return get_value
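
The TODO docstring above spells out the intended cache flow (look the key up, and on a miss compute the value and store it with a time limit), while the current body just calls through. Below is a minimal, self-contained sketch of that flow on top of the same redis + pickle combination; the decorator name `redis_cached` and the localhost connection are illustrative assumptions, not collipa's actual RedisPort API.

import pickle
from functools import wraps

import redis

rd = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)


def redis_cached(key, limit=86400):
    def wrap(func):
        @wraps(func)
        def get_value(*args, **kwargs):
            cached = rd.get(key)
            if cached is not None:
                return pickle.loads(cached)
            value = func(*args, **kwargs)
            # cache the pickled result and let it expire after `limit` seconds
            rd.setex(key, limit, pickle.dumps(value))
            return value
        return get_value
    return wrap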
Code example #11
#!/usr/bin/env python
import nasa
import json
import redis
import datetime
import time
import config

r = redis.StrictRedis(host=config.redis_host,
                      port=config.redis_port,
                      db=config.redis_db)


def add_image(sol, t, image):

    # All Feed
    r.zadd('msl-all-feed', t, json.dumps(image))
    r.sadd('msl-feeds', 'msl-all-feed')
    r.set('msl-all-feed-name', 'Latest Images from MSL')

    # instrument feed
    feedname = 'msl-%s-feed' % image['inst']
    r.zadd(feedname, t, json.dumps(image))
    r.sadd('msl-feeds', feedname)
    r.set('%s-name' % feedname,
          'Latest Images from %s on MSL' % nasa.MSL_KEY[image['inst']]["name"])

    # No Thumbs:
    if image['t'] == False:
        # All Feed
        r.zadd('msl-all-feed-nothumb', t, json.dumps(image))
Code example #12
File: test_dashboard.py Project: zaouk/ray
def test_basic(ray_start_with_dashboard):
    """Dashboard test that starts a Ray cluster with a dashboard server running,
    then hits the dashboard API and asserts that it receives sensible data."""
    assert (wait_until_server_available(ray_start_with_dashboard["webui_url"])
            is True)
    address_info = ray_start_with_dashboard
    node_id = address_info["node_id"]
    address = address_info["redis_address"]
    address = address.split(":")
    assert len(address) == 2

    client = redis.StrictRedis(
        host=address[0],
        port=int(address[1]),
        password=ray_constants.REDIS_DEFAULT_PASSWORD)

    all_processes = ray.worker._global_node.all_processes
    assert ray_constants.PROCESS_TYPE_DASHBOARD in all_processes
    assert ray_constants.PROCESS_TYPE_REPORTER not in all_processes
    dashboard_proc_info = all_processes[ray_constants.PROCESS_TYPE_DASHBOARD][
        0]
    dashboard_proc = psutil.Process(dashboard_proc_info.process.pid)
    assert dashboard_proc.status() in [
        psutil.STATUS_RUNNING, psutil.STATUS_SLEEPING, psutil.STATUS_DISK_SLEEP
    ]
    raylet_proc_info = all_processes[ray_constants.PROCESS_TYPE_RAYLET][0]
    raylet_proc = psutil.Process(raylet_proc_info.process.pid)

    def _search_agent(processes):
        for p in processes:
            try:
                for c in p.cmdline():
                    if "new_dashboard/agent.py" in c:
                        return p
            except Exception:
                pass

    # Test for bad imports, the agent should be restarted.
    logger.info("Test for bad imports.")
    agent_proc = _search_agent(raylet_proc.children())
    prepare_test_files()
    agent_pids = set()
    try:
        assert agent_proc is not None
        agent_proc.kill()
        agent_proc.wait()
        # The agent will be restarted for imports failure.
        for x in range(50):
            agent_proc = _search_agent(raylet_proc.children())
            if agent_proc:
                agent_pids.add(agent_proc.pid)
            # The agent should be restarted,
            # so we can break if the len(agent_pid) > 1
            if len(agent_pids) > 1:
                break
            time.sleep(0.1)
    finally:
        cleanup_test_files()
    assert len(agent_pids) > 1, agent_pids

    agent_proc = _search_agent(raylet_proc.children())
    if agent_proc:
        agent_proc.kill()
        agent_proc.wait()

    logger.info("Test agent register is OK.")
    wait_for_condition(lambda: _search_agent(raylet_proc.children()))
    assert dashboard_proc.status() in [
        psutil.STATUS_RUNNING, psutil.STATUS_SLEEPING
    ]
    agent_proc = _search_agent(raylet_proc.children())
    agent_pid = agent_proc.pid

    # Check if agent register is OK.
    for x in range(5):
        logger.info("Check agent is alive.")
        agent_proc = _search_agent(raylet_proc.children())
        assert agent_proc.pid == agent_pid
        time.sleep(1)

    # The agent should be dead if raylet exits.
    raylet_proc.kill()
    raylet_proc.wait()
    agent_proc.wait(5)

    # Check redis keys are set.
    logger.info("Check redis keys are set.")
    dashboard_address = client.get(dashboard_consts.REDIS_KEY_DASHBOARD)
    assert dashboard_address is not None
    dashboard_rpc_address = client.get(
        dashboard_consts.REDIS_KEY_DASHBOARD_RPC)
    assert dashboard_rpc_address is not None
    key = f"{dashboard_consts.DASHBOARD_AGENT_PORT_PREFIX}{node_id}"
    agent_ports = client.get(key)
    assert agent_ports is not None
Code example #13
import pickle

import pymongo
import redis

from wikipedia_helper import preprocess_search_text

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-i',
                    '--hostip',
                    type=str,
                    help='host IP for MongoDB/Redis')
parser.add_argument('-p', '--predict', type=str, help='Text to predict')

args = parser.parse_args()

# Connect to mongo and redis using the host IP argument
mgclient = pymongo.MongoClient(args.hostip)
r = redis.StrictRedis(args.hostip)

# Load fit models from Redis
lsa_vectorizer = pickle.loads(r.get('wiki_vectorizer'))
fit_svd = pickle.loads(r.get('wiki_fit_svd'))
knn_model = pickle.loads(r.get('wiki_knn_model'))
pageid_title_df = pickle.loads(r.get('pageid_title_df'))

# Preprocess new article text for prediction
predict_text = [line.rstrip('\n') for line in open(args.predict)]
predict_text = ','.join(predict_text)
predict_lsa = preprocess_search_text(args.predict, lsa_vectorizer, fit_svd)

# Predict the category of the new text and calculate confidence score of prediction
print("\nPredicted Category:", knn_model.predict(predict_lsa)[0])
print("Confidence Score:", knn_model.predict_proba(predict_lsa).max())
Code example #14
 def __init__(self):
     # create a connection object to the database on the local machine
     pool = redis.ConnectionPool(host='127.0.0.1', port=6379, db=0)
     self.conn = redis.StrictRedis(connection_pool=pool)
Code example #15
from keras.applications import imagenet_utils
from keras.preprocessing.image import img_to_array
from PIL import Image
import numpy as np
import settings
import helpers
import flask
import redis
import uuid
import time
import json
import io

# initialize our Flask application and Redis server
app = flask.Flask(__name__)
db = redis.StrictRedis(host=settings.REDIS_HOST,
                       port=settings.REDIS_PORT,
                       db=settings.REDIS_DB)


def prepare_image(image, target):
    # if the image mode is not RGB, convert it
    if image.mode != "RGB":
        image = image.convert("RGB")

    # resize the input image and preprocess it
    image = image.resize(target)
    image = img_to_array(image)
    image = np.expand_dims(image, axis=0)
    image = imagenet_utils.preprocess_input(image)

    # return the processed image
    return image
Code example #16
File: get_url.py Project: 18670775011/BossSpider
def write_to_redis(url_list):
    rds = redis.StrictRedis(host='10.36.133.177', port=6379, db=0)
    for url in url_list:
        rds.lpush('boss_urls', url)  # LPUSH needs a key; 'boss_urls' is an assumed placeholder
Code example #17
File: ktg.py Project: swizzard/ktg.io
# coding=utf8
__author__ = 'Sam Raker'

import os
import json
from random import sample
import urlparse

import redis

with open('config.json') as f:
    config = json.load(f)

r_url = urlparse.urlparse(os.environ.get("REDISCLOUD_URL"))
r = redis.StrictRedis(host=r_url.hostname,
                      port=r_url.port,
                      password=r_url.password)

four_rng = xrange(len(r.keys("*_4")) - 1)
eight_rng = xrange(len(r.keys("*_8")) - 1)


def get_fours(dupes=True):
    if dupes:
        return r.get('{}_4'.format(sample(four_rng,
                                          1)[0])), r.get('{}_4'.format(
                                              sample(four_rng, 1)[0]))
    else:
        x, y = sample(four_rng, 2)
        return r.get('{}_4'.format(x)), r.get('{}_4'.format(y))
Code example #18
import logging

import boto3
import redis

#------------------------------------------
# CONSTANTS
#------------------------------------------

table_name = 'VisualSearchMetadata'
endpoint_name = 'knn-2018-07-24-17-50-52-071'
redis_hostname = 'visual-search-2.de4w70.0001.use1.cache.amazonaws.com'

#------------------------------------------

logger = logging.getLogger()
logger.setLevel(logging.INFO)

dynamodb = boto3.resource('dynamodb')
r = redis.StrictRedis(host=redis_hostname,
                      port=6379,
                      db=0,
                      decode_responses=True)

runtime = boto3.client('runtime.sagemaker')


def lambda_handler(event, context):

    #---------------------------------------------
    #  UNPACK QUERY
    #---------------------------------------------

    # disregard messages other than those containing features
    if 'features' not in event:
        logger.info(event)
        return
Code example #19
def delete_cluster_redis_data(cluster_name):
    redis_cli = redis.StrictRedis(host=fit2ansible.settings.REDIS_HOST,
                                  port=fit2ansible.settings.REDIS_PORT)
    return redis_cli.delete(cluster_name)
Code example #20
File: testMatch.py Project: lty9520/crawlerDemo
        "mode": "normal-match",
        "ext": "js,java,py"
    },
    {
        "types": "bankofbeijing",
        "corp_name": "bob",
        "rule_keyword": "test",
        "mode": "normal-match",
        "ext": "js,java,py"
    },
]

# r = DbClient('redis://:[email protected]:6379/0')
pool = redis.ConnectionPool(host='127.0.0.1', port=6379, db=1, password='******')
# pool = redis.ConnectionPool.from_url('redis://:[email protected]:6379/1')
r = redis.StrictRedis(connection_pool=pool)
# rule_rds = eval(str(r.hget("keywords_rule", "test"), 'utf-8'))
rule_rds = {
    "types": "bankofbeijing",
    "corp_name": "corp_bob",
    "rule_keyword": "hello",
    "mode": "normal-match",
    "extension": "php,java,python,go,js,properties"
}

kws = rule_rds['rule_keyword']
if ' ' in kws:
    kw = kws.split(' ')
else:
    kw = [kws]

# kw = kws.split(' ')
print("kw:")
print(kw)

print('-'*40)
Code example #21
File: codigoenvio.py Project: DanielClavijo0508/Dash
            data = f.readline()
            data = data.strip('\n')
            fecha = f.readline()
            fecha = fecha.strip('\n')
            datos.append(pgn + "-" + data + "-" + fecha)
            print("PGN:" + pgn + " Data:" + data + " Fecha:" + fecha)
            print(datos[a])
            print(a)
            f.close()
            a = a + 1


#db = Redis(db=10)
db = redis.StrictRedis('127.0.0.1',
                       6379,
                       db=10,
                       charset="utf-8",
                       decode_responses=True)


def envio():
    b = 0
    #    global datos
    #while b<10:
    for row in datos:
        pgn, data, fecha = row.split("-")
        #pgn = row.split("-")[1]
        #print("hola"+pgn+data)
        o = {'PGN': pgn, 'T': fecha, 'Data': data}
        db.set(pgn, json.dumps(o))
        #b=b+1
Code example #22
class BigoShowSpider(scrapy.Spider):
    name = 'bigo_show_bak'
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'
    }
    headers1 = {
        'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'user-agent':
        'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36',
        'x-requested-with': 'XMLHttpRequest'
    }
    url = 'https://www.bigo.tv/openOfficialWeb/vedioList/5'
    item = BigoTestItem()
    #ln -s /usr/local/python3/bin /usr/bin/scrapy
    redishandler = redis.StrictRedis(host='172.21.15.64', port=6379, db=7)

    def start_requests(self):
        #country_url = 'https://www.bigolive.tv/openInterface/getCountryInfoList'
        #response = requests.get(url=country_url, headers=self.headers)
        #print(response.text)
        #data_list = json.loads(response.text)
        #country_areas = data_list.get('data', '')
        ids = [
            "WF", "JP", "JM", "JO", "WS", "GW", "GU", "GT", "GR", "GQ", "GP",
            "GY", "GF", "GE", "GD", "GB", "GA", "GN", "GM", "GL", "GI", "GH",
            "PR", "PS", "PW", "PT", "PY", "PA", "PF", "PG", "PE", "PK", "PH",
            "PN", "PL", "PM", "ZM", "ZA", "ZW", "ME", "MD", "MG", "MF", "MA",
            "MC", "MM", "ML", "MO", "MN", "MH", "MK", "MU", "MT", "MW", "MV",
            "MQ", "MP", "MS", "MR", "MY", "MX", "MZ", "FR", "FI", "FJ", "FK",
            "FM", "FO", "CK", "CI", "CH", "CO", "CN", "CM", "CL", "CC", "CA",
            "CG", "CF", "CD", "CZ", "CY", "CX", "CR", "CV", "CU", "SZ", "SY",
            "SS", "SR", "SV", "ST", "SK", "SI", "SH", "SO", "SN", "SM", "SL",
            "SC", "SB", "SA", "SG", "SE", "SD", "YE", "YT", "LB", "LC", "LA",
            "LK", "LI", "LV", "LT", "LU", "LR", "LS", "LY", "VA", "VC", "VE",
            "VG", "IQ", "VI", "IS", "IR", "IT", "VN", "IM", "IL", "IN", "IE",
            "ID", "BD", "BE", "BF", "BG", "BA", "BB", "BL", "BM", "BN", "BO",
            "BH", "BI", "BJ", "BT", "BW", "BR", "BS", "BY", "BZ", "RU", "RW",
            "RS", "RO", "OM", "HR", "HT", "HU", "HK", "HN", "EE", "EG", "EC",
            "ET", "ES", "ER", "UY", "UZ", "US", "UG", "UA", "VU", "NI", "NL",
            "NO", "NA", "NC", "NE", "NG", "NZ", "NP", "NR", "NU", "KG", "KE",
            "KI", "KH", "KN", "KM", "KR", "KP", "KW", "KZ", "KY", "DO", "DM",
            "DJ", "DK", "DG", "DE", "DZ", "TZ", "TV", "TW", "TT", "TR", "TN",
            "TO", "TL", "TM", "TJ", "TK", "TH", "TG", "TD", "TC", "AE", "AD",
            "AG", "AF", "AI", "AM", "AL", "AO", "AN", "AQ", "AS", "AR", "AU",
            "AT", "AW", "AZ", "QA"
        ]
        for country_area in ids:
            datas = {
                'ignoreUids': '1578156944',
                'tabType': country_area,
            }

            yield scrapy.FormRequest(url=self.url,
                                     headers=self.headers1,
                                     formdata=datas,
                                     meta={"country": country_area},
                                     callback=self.parse)

    def parse(self, response):
        country_area = response.meta.get('country', '')
        next_page_parms = response.meta.get('next_page_parms', '')
        #self.logger.error(response.status_code,'================================')

        data = json.loads(response.text)

        post_data = ''
        if data is not None:
            for data_1 in data:
                detail_link = 'http://www.bigolive.tv/' + str(
                    data_1['bigo_id'])  # first request

                # yield scrapy.Request(url=detail_link, meta={'datas': data_1, "country": country_area}, headers=self.headers, callback=self.parse_bean)   # pass the parsed data to parse_bean
                owner = '.' + str(data_1['owner'])  # concatenate the parameter
                post_data += owner  # concatenate the parameter
                self.redishandler.rpush('bigo_spider:items', data_1['bigo_id'])

            post_data = next_page_parms + post_data
            if len(data) == 30:
                datas = {
                    'tabType': country_area,
                    'ignoreUids': '1578156944' + post_data,
                }
                self.logger.error(datas)
                yield scrapy.FormRequest(url=self.url,
                                         headers=self.headers1,
                                         formdata=datas,
                                         meta={
                                             "country": country_area,
                                             'next_page_parms': post_data
                                         },
                                         callback=self.parse,
                                         dont_filter=True)

    #

    def parse_bean(self, response):

        data_json = response.meta.get('datas', '')
        country = response.meta.get('country', '')
        batch = time.strftime('%Y-%m-%d %H') + ':00:00'
        contribution_value = response.xpath('//i[@class="beans"]/text()').get()

        extras = {}
        self.item['cat1'] = '秀场'
        self.item['cat2'] = ''
        self.item['uid'] = str(data_json['room_id'])
        self.item['online'] = data_json['user_count']
        self.item['nickname'] = data_json['nick_name']
        self.item['platform'] = 'bigo'
        self.item['fans'] = ''
        self.item['contribution'] = contribution_value
        self.item['crawl_time'] = time.strftime('%Y-%m-%d %H:%M:%S',
                                                time.localtime(time.time()))
        self.item['batch'] = batch
        extras['owner'] = data_json['owner']
        extras['bigo_id'] = data_json['bigo_id']
        extras['country'] = country
        # extras['country_name'] = data_json['country_name']
        self.logger.info(extras)
        self.item['extras'] = extras
        yield self.item
Code example #23
import redis

if __name__ == '__main__':

    sr = redis.StrictRedis(host='10.0.0.4', port=6379, db=0)
    # sr = redis.StrictRedis() # defaults to 127.0.0.1

    # create / update
    res = sr.set('k', 'dhjashd111111')
    print(res)

    # get
    res = sr.get('k')
    print(res)

    # delete
    res = sr.delete('k')
    print(res)  # the returned number is how many keys were deleted

    # get all keys
    res = sr.keys()
    print(res)
Code example #24
import redis
import json
import datetime
import requests

queue_name = 'tracklogSegursatQueue'
gp_username = '******'
gp_password = '******'
items = []

redisClient = redis.StrictRedis(host='localhost', port=6379, db=0)
while (redisClient.llen(queue_name) != 0):
    item = redisClient.lpop(queue_name)
    item = json.loads(item)
    provider = item['provider']
    license_plate = item['unit_name']
    timestamp = int(item['timestamp'])
    dt = datetime.datetime.utcfromtimestamp(timestamp)
    dt = dt.strftime("%Y-%m-%dT%H:%M:%S")
    latitude = item['latitude']
    longitude = item['longitude']
    altitude = int(float(item['altitude']))
    speed = int(item['speed'])
    angle = int(item['angle'])
    input_value = 0
    if item['ignition'] == 'true' or item['ignition'] == 1 or item[
            'ignition'] == True:
        input_value = 1

    xml_item = f"""
        <TypeInfo>
Code example #25
def ices_init():
    global r, silence
    r = redis.StrictRedis(host='redis', port=6379, db=0)
    r.set('next_song', silence)
    return 1
Code example #26
import configparser

import pymysql
import redis

# read the database configuration
cf = configparser.ConfigParser()
# configparser is used to read and write config files
cf.read('DatabaseConfig.ini', encoding='utf-8')

cf_redis_name = "REDIS_TEST"
# redis
redis_db = cf.get(cf_redis_name, 'redis_db')
redis_host = cf.get(cf_redis_name, 'redis_host')
redis_port = cf.get(cf_redis_name, 'redis_port')
# mysql_user = cf.get(cf_redis_name, 'mysql_user')
redis_password = cf.get(cf_redis_name, 'redis_password')
r = redis.StrictRedis(host=redis_host,
                      port=int(redis_port),
                      db=int(redis_db),
                      password=redis_password)

cf_mysql_name = "MYSQL_TEST"
# mysql
mysql_db = cf.get(cf_mysql_name, 'mysql_db')
mysql_host = cf.get(cf_mysql_name, 'mysql_host')
mysql_port = cf.get(cf_mysql_name, 'mysql_port')
mysql_user = cf.get(cf_mysql_name, 'mysql_user')
mysql_password = cf.get(cf_mysql_name, 'mysql_password')


# every ...
def mysql2redis(id):

    mysql_conn = pymysql.connect(mysql_host,
Code example #27
File: dummy.py Project: ppp0/openbroadcast
import redis
import json
import string
import random
import time

rs = redis.StrictRedis()


while True:

    names = ['peter', 'johannes', 'klaus', 'root']

    num = random.randint(5,45)

    str = ''.join(random.choice((string.whitespace *3) + string.ascii_lowercase) for x in range(num))

    rs.publish('push_chat', json.dumps({'type': 'message', 'comment': '%s' % str, 'user': '******' % random.choice(names)}))
    print 'sleep %s' % num
    time.sleep(num/20)
    time.sleep(5.5)
    
Code example #28
File: views.py Project: JefvdA/AP
from django.shortcuts import redirect, render
import redis

from .models import Author, Quote

r = redis.StrictRedis('localhost', 6379, decode_responses=True)

# Create your views here.
def index(request):
    keys = r.keys('author:*')
    authors = []
    for key in keys:
        a = Author()
        a.id = key.split(':')[1]
        a.author_name = key.split(':')[2]
        a.author_bio = r.get(key)
        authors.append(a)

    return render(request, 'quotes/index.html', {'authors': authors})

def detail(request, author_id):
    keys = r.keys(f'author:{author_id}:*')
    key = keys[0]
    a = Author()
    a.id = int(author_id)
    a.author_name = key.split(':')[2]
    a.author_bio = r.get(key)
    quote_list = r.smembers(f'quote:{author_id}')

    return render(request, 'quotes/detail.html', {'author':a, 'quote_list': quote_list})
Code example #29
File: app.py Project: ak9250/ganarts
from pathlib import Path
import json
from io import BytesIO
from flask import Flask, render_template, send_file
import redis

app = Flask(__name__)
sync_file = Path('sync_file')
sync_file.touch()
prefix_file = Path('prefix_file')
redis_conn = redis.StrictRedis(host='redis', port=6379, db=0)


def read_urls(prefix):
    urls = redis_conn.get(f'{prefix}_images_urls')
    urls = json.loads(urls if urls is not None else '[]')
    return urls


def load_image(filename):
    img = redis_conn.get(filename)
    return img


@app.route('/')
def index():
    sync_file.touch()
    prefix = prefix_file.read_text()

    urls = read_urls(prefix)
    return render_template('index.html',
Code example #30
 def __init__(self, config):
     self._redis = redis.StrictRedis(host=config.host,
                                     port=config.port,
                                     password=config.password,
                                     db=0)