Exemple #1
0
# Django cache backed by Redis; REDIS_HOST is defined elsewhere in settings.
CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': '{}:{}'.format(REDIS_HOST, 6379),  # host:port, default Redis port
    },
}

# Huey task-queue settings (https://huey.readthedocs.io/).
HUEY = {
    'huey_class': 'huey.RedisHuey',
    'name': 'meetuper_huey',
    # Run tasks synchronously in-process when HUEY_IMMEDIATE=1 or under tests.
    'immediate': os.getenv('HUEY_IMMEDIATE') == '1' or IS_TESTING,
    'connection': {
        # Shared pool so consumers reuse sockets instead of reconnecting per task.
        'connection_pool': redis.ConnectionPool(
            max_connections=50,
            host=REDIS_HOST,
            port=6379
        ),
    },
    'consumer': {
        'workers': 4,
        'worker_type': 'thread',
        'check_worker_health': True,
        'health_check_interval': 1,  # seconds between worker health checks
    },
}


# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
# NOTE(review): the two comment lines above look like leftovers from a Django
# settings file; the code below is an unrelated pickle-backed Redis cache helper.
import redis
import pickle
import logging

from config import REDISDB, REDISHOST, REDISPORT

# One shared connection pool for the 'rules' cache namespace.
rules_pool = redis.ConnectionPool(
    host=REDISHOST, port=REDISPORT, db=REDISDB)
# Maps a cache-type name to its connection pool; add entries for new namespaces.
cache_type = {
    'rules': rules_pool
}
logger = logging.getLogger(__name__)


def set(key, value, ctype='rules', ex=None, px=None, nx=False, xx=False):
    """Pickle *value* and store it under *key* using the pool named *ctype*.

    ex/px/nx/xx are forwarded to redis-py's SET (expiry / set-if flags).
    NOTE: shadows the builtin ``set`` -- name kept for interface compatibility.
    """
    client = redis.StrictRedis(connection_pool=cache_type.get(ctype))
    payload = pickle.dumps(value)
    return client.set(key, payload, ex, px, nx, xx)


def get(key, ctype='rules'):
    """Fetch *key* from the *ctype* cache and unpickle it; None if missing.

    WARNING: pickle.loads on cache contents -- only safe when the Redis
    instance holds trusted data.
    """
    client = redis.StrictRedis(connection_pool=cache_type.get(ctype))
    raw = client.get(key)
    return None if raw is None else pickle.loads(raw)


def exists(key, ctype='rules'):
Exemple #3
0
import redis

'''
r = redis.Redis(host='127.0.0.1')
# print(r.keys())
# all_keys = r.keys()
# for k in all_keys:
#     print(k, r.get(k.decode()))

r.set("Name", "ChunYun", ex=3)  # 3秒过期
print(r.get("Name"))
'''
# Connection pool: clients created from it share sockets.

pool = redis.ConnectionPool(host="127.0.0.1")
r = redis.Redis(connection_pool=pool)
# print(r.keys())


# r.set("Name", "A", nx=True)  # nx: only set if the key does not already exist
# print(r.get("Name"))


# r.mset(k1='v1', k2='v2')  # set several keys in one call
# print(r.keys())

# r.set("id", "37148119...")
# print(r.getrange("id", 3, 6))  # substring of the stored value, like slicing

# r.set("id", "37148119...")
Exemple #4
0
#!/usr/bin/env python
# coding=utf-8
import paho.mqtt.client as mqtt
import time
import redis


def on_connect(client, userdata, flags, rc):
    """MQTT connect callback: report the broker's CONNACK result code."""
    print("Connected with result code {}".format(rc))


def on_message(client, userdata, msg):
    # Write into redis: append "(payload, timestamp, topic/key)*" onto the
    # value stored under the key held in "table00".
    # NOTE(review): under Python 3 rds.get() returns bytes, so
    # msg.topic + "/" + rds.get("table00") would raise TypeError -- this
    # appears written for Python 2 or decode_responses=True; confirm.
    msg_now = (msg.payload, time.strftime("%Y-%m-%d %H:%M:%S"),
               msg.topic + "/" + rds.get("table00"))
    rds.append(rds.get("table00"), str(msg_now) + "*")


pool = redis.ConnectionPool(host="127.0.0.1", port=6379, db=1)
# pool.connection_kwargs["db"] = 2  # switch db before connecting
rds = redis.Redis(connection_pool=pool)
# rds.connection_pool.connection_kwargs["db"] = 2  # switch db after connecting

client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.username_pw_set("admin", "public")
client.connect("192.168.199.221", 1883, 60)  # broker host, port, keepalive secs
client.subscribe("testtopic", qos=0)
client.loop_forever()  # blocks, dispatching callbacks until interrupted
]

MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')

# django-modeltranslation configuration.
MODELTRANSLATION_DEFAULT_LANGUAGE = 'en'
MODELTRANSLATION_LANGUAGES = ('en', 'ru', 'pl', 'uk')

MODELTRANSLATION_TRANSLATION_FILES = (
    'gifts.translation', 'jobs.translation',)

# Raw Redis connection settings (separate from Django's CACHES configuration).
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
REDIS_DB = 0

POOL = redis.ConnectionPool(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)

CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://localhost:6379/1',
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient"
        }
    },
    "select2": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://localhost:6379/2",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        }
Exemple #6
0
 def get_pool(self, connection_info=None, max_connections=None):
     """Build a redis.ConnectionPool over DummyConnection.

     Falls back to a fixed dummy kwarg set when connection_info is falsy
     (None or an empty mapping).
     """
     info = connection_info or {'a': 1, 'b': 2, 'c': 3}
     return redis.ConnectionPool(connection_class=DummyConnection,
                                 max_connections=max_connections,
                                 **info)
 def __init__(self):
     # Pool is created per instance; clients built from it share connections.
     self.pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
Exemple #8
0
# -*- coding:utf-8 -*-
# Author: "IT xiaoxuesheng" Cai Tuotuo
# Date: 2020/12/4 10:55

import redis


# # Option 1: connect to redis directly
# conn = redis.Redis(host='127.0.0.1', port=6379, encoding='utf-8')
# # Set key 15059224492="2222" with a 60-second TTL (values are coerced to
# # strings when written to redis)
# conn.set('15059224492', 2222, ex=60)
# # Get by key: returns the value (as bytes) if present, else None
# value = conn.get('15059224492')
# print(value)

from django.shortcuts import HttpResponse
from django_redis import get_redis_connection

# Option 2: create a redis connection pool
pool = redis.ConnectionPool(host='127.0.0.1',
                            port=6379,
                            encoding='utf-8',
                            max_connections=1000)
# Borrow a connection from the pool
conn = redis.Redis(connection_pool=pool)

conn.set('15059224492', 9986, ex=60)  # 60-second TTL
# Get by key: returns the value (as bytes) if present, else None
value = conn.get('15059224492')
print(value)
Exemple #9
0
# Primary MongoDB connection; the MONGO_DB dict is defined elsewhere and its
# 'name' key doubles as the database name (pop() removes it from the kwargs).
MONGODB = connect(MONGO_DB.pop('name'), **MONGO_DB)

MONGO_ANALYTICS_DB_DEFAULTS = {
    'name': 'nbanalytics',
    'host': 'db_mongo_analytics:27017',
    'alias': 'nbanalytics',
}
# Overlay site-specific MONGO_ANALYTICS_DB settings on top of the defaults.
MONGO_ANALYTICS_DB = dict(MONGO_ANALYTICS_DB_DEFAULTS, **MONGO_ANALYTICS_DB)
MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB.pop('name'),
                           **MONGO_ANALYTICS_DB)

# =========
# = Redis =
# =========

# One pool per logical store; the db number keeps the datasets separate.
REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=0)
REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'],
                                            port=6379,
                                            db=2)
REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS['host'],
                                             port=6379,
                                             db=3)
REDIS_FEED_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
REDIS_SESSION_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=5)
# REDIS_CACHE_POOL         = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'],
                                         port=6379,
                                         db=0)
REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'],
                                             port=6379,
                                             db=1)
Exemple #10
0
# coding: utf-8
import hashlib
import os
import uuid

import redis
import tornado.ioloop
import tornado.web

# Module-level pool/connection shared by all request handlers below.
pool = redis.ConnectionPool(host="10.1.10.51", port=6379)
conn = redis.Redis(connection_pool=pool)


class RedisSeesion:
    # Redis-backed session helper for Tornado handlers.
    # (Keeps the original's misspelled name -- renaming would break callers.)
    CookieID = "name"        # secure-cookie name that holds the session id
    ExpiresTime = 60 * 10    # session TTL in seconds (10 minutes)

    def __init__(self, handler):
        """Attach to *handler*: reuse its valid session, or mint a new one."""
        self.handler = handler
        SessionID = self.handler.get_secure_cookie(RedisSeesion.CookieID, None)
        if SessionID and conn.exists(SessionID):
            self.SessionID = SessionID
        else:
            self.SessionID = self.SessionKey()
            # Create the hash key so exists() succeeds on the next request.
            conn.hset(self.SessionID, None, None)
        # Sliding expiration: refresh the TTL on every request.
        conn.expire(self.SessionID, RedisSeesion.ExpiresTime)
        self.handler.set_secure_cookie(RedisSeesion.CookieID, self.SessionID)

    def SessionKey(self):
        UUID = str(uuid.uuid1()).replace("-", "")
        MD5 = hashlib.md5()
Exemple #11
0
def get_redis_pool():
    """Lazily create and memoize the module-level Redis connection pool."""
    global REDIS_POOL
    if REDIS_POOL:
        return REDIS_POOL
    REDIS_POOL = redis.ConnectionPool(host="127.0.0.1", port=6379, db=0)
    return REDIS_POOL
Exemple #12
0
 def __init__(self, host, port=6379):
     # db 0 is hard-coded; host/port come from the caller.
     self.pool = redis.ConnectionPool(host=host, port=port, db=0)
Exemple #13
0
# -*- coding=utf-8 -*-
from pandas import Series, DataFrame
import pandas as pd
import redis

# Build sample data
data = {'state': ['Ohio', 'Ohio', 'Ohio', 'Nevada', 'Nevada'],
        'year': [2000, 2001, 2002, 2001, 2002],
        'pop': [1.5, 1.7, 3.6, 2.4, 2.9]}
frame = DataFrame(data)

# Get a redis connection
pool = redis.ConnectionPool(host='127.0.0.1', port=6379)
r = redis.Redis(connection_pool=pool)

# r.set("frame", frame)
# a = r.get("frame")

# Store the frame as zlib-compressed msgpack bytes.
# NOTE(review): DataFrame.to_msgpack / pd.read_msgpack were deprecated in
# pandas 0.25 and removed in 1.0 -- this script only runs on older pandas.
r.set("frame", frame.to_msgpack(compress='zlib'))
# Read it back
a = pd.read_msgpack(r.get("frame"))
print(a)
Exemple #14
0
 def _get_pool(self):
     # Build a fresh pool from this instance's connection parameters.
     # NOTE(review): _connparams() is defined elsewhere; assumed to return
     # ConnectionPool kwargs (host/port/db/...) -- confirm against the class.
     return redis.ConnectionPool(**self._connparams())
Exemple #15
0
from web.settings import *
import config
from fuzzer import Fuzzer, InstallError
from concolic import Concolic, pcap

l = logging.getLogger("mining.tasks")

db = torndb.Connection(db_server, db_database, db_username, db_password)

# Celery uses the same Redis instance as both broker and result backend.
redis_url = "redis://%s:%d" % (config.REDIS_HOST, config.REDIS_PORT)
app = Celery('tasks', broker=redis_url, backend=redis_url)
app.conf.CELERY_ROUTES = config.CELERY_ROUTES
# Ack only after the task finishes, so a killed worker's task is redelivered.
app.conf['CELERY_ACKS_LATE'] = True
app.conf['CELERYD_PREFETCH_MULTIPLIER'] = 1

redis_pool = redis.ConnectionPool(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.REDIS_DB)

def get_fuzzer_id(input_data_path):
    """Extract "<fuzzer-name>,src:<input-id>" from an AFL-style sync path.

    Returns the string "None" (not the None object -- preserved because
    callers expect a string) when the path lacks the sync/ or id: markers.
    """
    abs_path = os.path.abspath(input_data_path)
    if "sync/" not in abs_path or "id:" not in abs_path:
        l.warning("path %s, cant find fuzzer id", abs_path)
        return "None"
    # Text after the last "sync/", up to the next slash, is the fuzzer name.
    fuzzer_name = abs_path.rpartition("sync/")[2].partition("/")[0]
    # Text after the last "id:", up to the next comma, is the input id.
    input_id = abs_path.rpartition("id:")[2].partition(",")[0]
    return "%s,src:%s" % (fuzzer_name, input_id)

@app.task
def drill(binary, input_data, bitmap_hash, tag):
    redis_inst = redis.Redis(connection_pool=redis_pool)
    fuzz_bitmap = redis_inst.hget(binary + '-bitmaps', bitmap_hash)
Exemple #16
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import datetime
import time
import sys
import simplejson as json
import redis

from config.role import HOST_ROLE, MASTER_SETINEL_HOST, MASTER_REDIS_MASTER, SLAVE_SETINEL_HOST, SLAVE_REDIS_MASTER
from config.rkeys import *

import pprint

# Local Redis instance used by this script.
POOL = redis.ConnectionPool(host='localhost', port=6379, db=0)
r = redis.StrictRedis(connection_pool=POOL)

pp = pprint.PrettyPrinter(indent=4)

# One-off maintenance commands, kept for reference:
#r.flushdb()
#sys.exit()

#r.delete(r_SS_FLRN_BTC_1H_HISTORY)
#sys.exit()


def check_redis():
    if HOST_ROLE == 'MASTER':
        SETINEL_HOST = MASTER_SETINEL_HOST
        REDIS_MASTER = MASTER_REDIS_MASTER
Exemple #17
0
# Keyword-relevance search: score each content row by keyword hits and cache
# the ranked id list in Redis under the caller's session id.
sql1 = "set names utf8"
sql = "select id,content,title from content_text"

cursor.execute(sql1)
cursor.execute(sql)
res = cursor.fetchall()
id_dict = {}
for data in res:
    id_dict[str(data[0])] = 0
    # Count case-insensitive keyword matches in the body (data[1]).
    for key in keywords:

        id_dict[str(data[0])] = id_dict[str(data[0])] + len(
            re.findall(key, data[1], flags=re.IGNORECASE))
    for key in keywords:
        # Weight title (data[2]) matches more heavily. NOTE(review): the
        # original comment said "weight = 5", but the code multiplies the
        # match list by 1000 before len(), i.e. each title hit counts as
        # 1000 -- confirm the intended weight.
        id_dict[str(data[0])] = id_dict[str(data[0])] + len(
            re.findall(key, data[2], flags=re.IGNORECASE) * 1000)

id_list = []

# Rank ids by score, highest first; keep only ids with at least one hit.
key_words = sorted(id_dict.items(), key=lambda x: x[1], reverse=True)
for i in key_words:
    if i[1] != 0:
        id_list.append(int(i[0]))

# Cache the result in redis (server must be running; default port is 6379).
pool = redis.ConnectionPool(host='localhost', port=6379, decode_responses=True)
r = redis.Redis(connection_pool=pool)
r.set(sessionId, str(id_list))
Exemple #18
0
# Project status transition routes: maps each state (keys are runtime
# strings, left untranslated) to the states it may move to next.
# Rough meanings: 初始化=initialized, 配置=configured, 就绪=ready,
# 执行=running, 暂停=paused, 中止=aborted, 完成=completed, 结束=ended.
PROJECT_STATUS_CHANGE_ROUTE = {
    '初始化': ['配置'],
    '配置': ['就绪', '中止'],
    '就绪': ['执行', '中止'],
    '执行': ['暂停', '中止', '完成'],
    '暂停': ['执行', '中止'],
    '中止': ['结束'],
    '完成': ['结束'],
    '结束': []
}

# Redis connection pool
_redis_pool = redis.ConnectionPool(host=redis_host,
                                   port=redis_port,
                                   db=redis_db)
# Shared redis connection
public_redis = redis.Redis(connection_pool=_redis_pool)


def alloc_redis():
    """Return a cache client backed by the module-wide connection pool."""
    client = redis.Redis(connection_pool=_redis_pool)
    return client


def alloc_subscriber():
    """Return a pub/sub subscriber bound to the shared connection pool."""
    pubsub = redis.client.PubSub(connection_pool=_redis_pool)
    return pubsub

INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR 
THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""

#-*- coding: utf-8 -*-
import time
import random

import redis

import ExeSysMain.environment as env

# Pool/connection to the data DB configured in the environment module.
pool = redis.ConnectionPool(host=env.REDISIP,
                            port=env.REDISPORT,
                            db=env.REDISDATADB)
redisconn = redis.Redis(connection_pool=pool)


def test_error_cmd(data_to_process):
    # Worker command used to exercise error handling: sleeps a random
    # 0-2 seconds, then deliberately raises ZeroDivisionError via 1 / 0,
    # so the lines after it never run.
    # NOTE(review): "print str(...)" is a Python 2 print statement -- this
    # module does not run under Python 3.
    resp = {}
    #print "test function with data %s" %  data_to_process
    time.sleep(2 * random.random())
    print str(1 / 0)
    resp['EOJ'] = data_to_process
    return resp


def test_cmd(data_to_process):
    resp = {}
Exemple #20
0
def connect_docker_redis():
    """Build a connection pool for the dockerized Redis (decoded str replies)."""
    return redis.ConnectionPool(host=REDIS_HOST_DOCKER,
                                port=6379,
                                decode_responses=True)
Exemple #21
0
from sendmail import sendMail, weixin_alert
from ScanTool import Scan

sys.path.insert(0, '../')

mailto = conf.CONFIG.get('email').get('email_to')

REDIS_HOST = conf.CONFIG.get('redis').get('redis_host')
REDIS_PORT = conf.CONFIG.get('redis').get('redis_port')
REDIS_PASSWORD = conf.CONFIG.get('redis').get('redis_password')

Alertuserlist = conf.CONFIG.get('weixin').get('Alertuserlist')

# NOTE(review): the password/port arguments below look scrubbed or broken:
# '******' % REDIS_PASSWORD raises TypeError at import time (no %s
# placeholder in the format string), and "%d" % REDIS_PORT passes the port
# as a str. The originals were presumably password=REDIS_PASSWORD and
# port=REDIS_PORT -- confirm before restoring.
pool = redis.ConnectionPool(
    host='%s' % REDIS_HOST,
    password='******' % REDIS_PASSWORD,
    port="%d" % REDIS_PORT,
    db=1,
    decode_responses=True)  # redis server must be reachable; default port 6379
r = redis.Redis(connection_pool=pool)


def display_result(clobj, reportFile):
    scan = clobj()
    with open(os.getcwd() + '/tmp/' + '%s' % reportFile, 'rb') as sfile:
        task_lists = pickle.load(sfile)

    tem = {}
    for project in task_lists:
        for pro_id, tasks_id in project.items():
            results = scan.show_scan_result(tasks_id)
            tem[pro_id] = {pro_id: pro_id, 'ip_port': results}
Exemple #22
0
 def __init__(self):
     # Create the connection pool and connect (db 6, decoded str replies).
     pool = redis.ConnectionPool(host='localhost', port=6379, db=6, decode_responses=True)
     self.r_conn = redis.Redis(connection_pool=pool)
     self.name = 'keyword:hot:search'  # redis key used by this helper
Exemple #23
0
# -*- coding: UTF-8 -*-

import redis

# Scan every cluster master for keys matching the pattern below and print them.
MasterIpList = ["192.168.199.132:6386", "192.168.199.132:6385", "192.168.199.131:6379", "192.168.199.131:6386", "192.168.199.131:6383", "192.168.199.132:6384"]
KeyList = []
RedisNodes = []
Count = 0

for MasterIp in MasterIpList:
    RedisHost = MasterIp.split(":")[0]
    RedisPort = MasterIp.split(":")[1]
    RedisNodes.append({"host": RedisHost, "port": RedisPort})

    pool = redis.ConnectionPool(host = RedisHost, port = RedisPort, db = 0)
    Conn = redis.Redis(connection_pool = pool)
    # SCAN (cursor-based, non-blocking) rather than KEYS; 100 keys per batch.
    for Key in Conn.scan_iter(match='c:50000000001279*', count=100):
        #Conn.delete(Key)
        print(Key)
Exemple #24
0
import redis
import time

# Create a connection pool and connect to redis
pool = redis.ConnectionPool(host='127.0.0.1',
                            db=0,
                            port=6379,
                            password='******')  # password scrubbed in this copy
r = redis.Redis(connection_pool=pool)


def time_spend(fun):
    """Decorator: print how many wall-clock seconds *fun* took to run.

    Fixes over the original: forwards all positional/keyword arguments to
    the wrapped function (the original silently dropped everything after
    the first argument), returns the wrapped function's result (the
    original discarded it and returned None), and preserves the wrapped
    function's metadata via functools.wraps.
    """
    import functools  # local import: keeps the block self-contained

    @functools.wraps(fun)
    def giao(r, *args, **kwargs):
        t1 = time.time()
        result = fun(r, *args, **kwargs)
        t2 = time.time()
        print(t2 - t1)
        return result

    return giao


@time_spend
def withpipeline(r):
    """Send 1000 SET commands through one pipeline (a single round trip)."""
    pipe = r.pipeline()
    for i in range(1000):
        pipe.set('test1' + str(i), i + 1)
    pipe.execute()

Exemple #25
0
    redpipe.renamenx("endlist", "T_endlist_%s" % (datadate))
    redpipe.renamenx("regiontable", "T_regiontable_%s" % (datadate))


try:
    Config = HandleConfig()

    logger = getLog("Step1", logfile=Config.LogFile, loglevel=Config.LogLevel)

    logger.info("Step1 Handle Start")

    checkService(logger, Config)

    datadate = Config.date

    redispool = redis.ConnectionPool(host=Config.RedisIp, port=6379, db=0)
    redata = redis.Redis(connection_pool=redispool)
    redpipe = redata.pipeline()

    #验证该日期数据是否已被处理
    if redata.hexists("checkdate", datadate):
        logger.error("The day the data has been processed")
        sys.exit(Config.EX_CODE_1)
    else:
        redata.hset("checkdate", datadate, 1)

    #添加当天日期到全局日期列表
    redata.rpush("date", datadate)

    #添加当天日期到uid_rdate
    queue = Queue.Queue(0)
Exemple #26
0
import sys
import redis
import tornado.websocket
import tornado.web
import tornado.ioloop

from redis.client import PubSub
from salt.client import get_local_client
from tornado import gen
from tornado.escape import to_unicode

from logs.utility import get_last_lines
from logs import settings

# Shared pool for the websocket handlers below (db 5, credentials from settings).
REDIS_CONNECTION_POOL = redis.ConnectionPool(host=settings.REDIS_HOST,
                                             port=settings.REDIS_PORT,
                                             password=settings.REDIS_PASSWD,
                                             db=5)


class SubWebSocket(tornado.websocket.WebSocketHandler):
    # Websocket handler wired to Redis pub/sub (set up in open()).
    def open(self, *args, **kwargs):
        # ignore_subscribe_messages drops the subscribe-confirmation events,
        # so only real published payloads are delivered later.
        self.pubsub = PubSub(REDIS_CONNECTION_POOL,
                             ignore_subscribe_messages=True)
        self.client = self.request.connection.context.address
        print("opened")

    def assemble_cmd(self, log_path, cmd):
        """Prepend a kill of any logtail.py already following *log_path* to *cmd*."""
        kill_tmpl = "kill `ps aux|grep logtail.py|grep %s|grep -v grep|awk '{print $2}'`"
        kill_cmd = kill_tmpl % (log_path,)
        return "%s;%s" % (kill_cmd, cmd)
Exemple #27
0
# -*- coding:utf-8 -*- 
#!/usr/bin/env python3
#Description: wukong exploit 
#Author:      Bing
#DateTime:    2017-05-10 23:08:39
import sys
sys.path.append("..")

from core.settings import * 
import re, os, redis, socket, time, requests


try:
	# Best-effort: build the task-queue pool; any failure is swallowed so the
	# module still imports when Redis is unreachable. NOTE(review): callers
	# will then hit NameError on `pool`/`r` -- consider narrowing the except.
	pool = redis.ConnectionPool(host = redis_host, port = redis_port, db = redis_db_task , password = redis_pwd, socket_timeout=3)
	r = redis.Redis(connection_pool=pool)
except:
	pass


def program_log(datas):
	msg = False
	web_info = ""
	try:
		res = requests.post(url = API_LOG_URL, data = datas)
		text = res.status_code
		if int(text) == 200:
			web_info = "日志上传成功"
			msg = True
	except Exception as e:
		web_info = e.__str__()
		msg = False
Exemple #28
0
def conn():
    """Return a Redis client for 120.77.203.242 that decodes replies to str."""
    connection_pool = redis.ConnectionPool(host='120.77.203.242',
                                           port=6379,
                                           decode_responses=True)
    return redis.Redis(connection_pool=connection_pool)
Exemple #29
0
        'host': '127.0.0.1',
        'database': 'netease_mail',
        'user': '******',
        'password': '******',
    },
    'redis': {
        'host': '127.0.0.1',
        'port': 6379,
        'db': 0
    }
}

# Database connection instances built from the config dict above.
myConn = torndb.Connection(**config['mysql'])

pool = redis.ConnectionPool(**config['redis'])
redisConn = redis.Redis(connection_pool=pool)


def group_mail():
    page = 0
    limit = 5000000
    key = 'zset_group_mail'
    while True:
        page_ = page * limit
        olist = myConn.query(" select * from `duobao_user_join` limit %s,%s ",
                             page_, limit)
        if not olist:
            break
        # zset = []
        for i in olist:
def split_by_submission(reddit_directory,
                        output_directory,
                        num_splits,
                        cached=False,
                        map_cache=None):
    """
    Splits the reddit dataset by submission ID

    :param reddit_directory: The top level reddit directory
    :param output_directory: Output directory to write independent sub-datasets
    :param num_splits: The number of segments to split the data into
    :param cached: If True, skip rebuilding the {comment -> submission} map
        from the comment tables (it is assumed to already be in Redis, or
        loadable from map_cache)
    :param map_cache: Optional directory of serialized dictionaries holding
        the {comment fullname -> submission fullname} map, loaded into Redis
        when cached is True
    :return: None
    """
    logger.debug("Creating target directories...")
    global target_directories
    target_directories = create_split_directories(output_directory, num_splits)
    logger.debug("Target directories created.")

    logger.debug("Connecting to Redis database...")
    global redis_pool
    redis_pool = redis.ConnectionPool(host="localhost", port=6379, db=0)

    if not cached:
        # The comment data must be loaded and read so that we have the mapping
        # from comment full-name to base (submission) full-name, which is required for the splitting
        # of the other data sets
        logger.info("No database of {comment --> submission} map cached.")
        logger.info("Processing comment tables...")
        split_data_set(reddit_directory,
                       "stanford_comment_data",
                       "post_fullname",
                       num_splits,
                       target_directories,
                       map_columns=("comment_fullname", "post_fullname"))

    elif map_cache is not None and os.path.isdir(map_cache) and os.listdir(
            map_cache):
        logger.debug("Loading dictionaries from cache into Redis...")
        load_dict_cache_into_db(map_cache)

    else:
        logger.debug(
            "Redis Database cache exists. Skipping comment splitting.")

    # Sanity check: report how many {comment -> submission} mappings are loaded.
    redis_db = redis.StrictRedis(connection_pool=redis_pool)
    logger.debug("Redis database has: %d keys" %
                 redis_db.info()['db0']['keys'])

    # Now split the rest of the data while adding a column using the mapping that we have
    for data_set_name in [
            "stanford_report_data", "stanford_removal_data",
            "stanford_vote_data"
    ]:
        mapped_split(reddit_directory, data_set_name, 'target_fullname',
                     'post_fullname', num_splits)

    # Split the submission tables (they don't need to be mapped using the database)
    logger.info("Processing submission tables...")
    split_data_set(reddit_directory, "stanford_submission_data",
                   "post_fullname", num_splits, target_directories)