def initialize(self, stormconf, context):
    """Storm bolt hook: set up per-worker state and backing stores.

    Called once per worker before any tuples are processed; ``stormconf``
    and ``context`` are accepted per the Storm bolt contract but unused.
    """
    # In-process cache of keys seen so far.
    self.key_dict = {}
    # Record the worker's OS process id (useful in logs/diagnostics).
    self.pid = os.getpid()
    # Persistent store plus a memcached client; the server list comes from
    # the global FLAGS configuration.
    self.cass = Cassandra()
    self.mc = memcache.Client(FLAGS.memcached_servers, debug=0)
# Walkthrough of three key/value stores: dbm (on-disk), memcached, and
# Redis. The bare expression statements mirror an interactive session;
# their results are computed and discarded.

# --- dbm: persistent on-disk key/value file ------------------------------
db = dbm.open('definitions', 'c')  # 'c': create if missing
db['mustard'] = 'yellow'
db['ketchup'] = 'red'
db['pesto'] = 'green'
len(db)      # number of stored keys (result discarded)
db['pesto']  # sample lookup (result discarded)
db.close()

# Reopen read-only and look a value up again.
db = dbm.open('definitions', 'r')
db['mustard']

# --- memcached: networked in-memory cache --------------------------------
import memcache
db = memcache.Client(['127.0.0.1:11211'])
db.set('marco', 'polo')
db.get('marco')
db.set('ducks', 0)
db.get('ducks')
db.incr('ducks', 2)  # atomic server-side increment
db.get('ducks')

# --- Redis ---------------------------------------------------------------
import redis
conn = redis.Redis()  # connects lazily with default host/port
import random import pymysql from flask import Flask, render_template, request import memcache import os import hashlib import time app = Flask(__name__) memC = memcache.Client([''], debug=0) def connectDB(): return pymysql.connect(host='', port=3306, user='', password='', db='', local_infile=True) def createDB(): conn = connectDB() cur = conn.cursor() cur.execute("""DROP TABLE IF EXISTS data""") conn.commit() query = """ CREATE TABLE data ( `Gender` VARCHAR(6) CHARACTER SET utf8, `GivenName` VARCHAR(9) CHARACTER SET utf8,
# Demo: storing Python objects in memcached -- first a class attribute
# holder, then a plain dict. (Python 2 syntax; the printed strings are
# intentionally left in Indonesian.)
import memcache

mc = memcache.Client(['127.0.0.1:11211'], debug=0)

print "menggunakan class========================="

class Oki():
    # Simple attribute holder used as the cached payload.
    nama = "oki priyadi"
    panggilan = "oki"
    lahir = "bandung"

# NOTE(review): this caches the *class object itself* (no parentheses),
# not an instance -- the attribute reads below work either way.
saya = Oki
mc.set("ini_key1", saya)
value = mc.get("ini_key1")
print "value 1", value.nama
print "value 2", value.panggilan
print "value 3", value.lahir

print "menggunakan dict========================="
ini_dict = {"nami":"oki priyadi", "jenengan":"oki","kawit":"bdg"}
mc.set("ini_key2", ini_dict)
value = mc.get("ini_key2")
print "value 1", value["nami"]
print "value 2", value["jenengan"]
print "value 3", value["kawit"]
def POST(self):
    """Handle a control POST: toggle relays, send IR signals, set timers,
    or report channel state, selected by the ``mode`` request parameter.

    Returns a JSON string for toggle/timer/check requests; returns None
    (empty response body) for signal requests and unmatched input.
    """
    if DEBUG_ENABLED:
        print "[" + time.strftime("%c") + "]" + " Received POST: "
    web.header('Content-Type', 'application/json')
    # NOTE(review): web.input() is re-parsed for each field; a missing
    # parameter raises here -- assumes callers always send
    # channel/state/mode. Confirm against the frontend.
    channel = web.input()["channel"].strip()
    state = web.input()["state"].strip()
    mode = web.input()["mode"].strip()
    #print "\tCMD \""+cmd+"\""
    #print "\tSTATE \""+state+"\""
    # Fresh memcached client per request; channel state lives in memcached.
    mem = memcache.Client([('127.0.0.1', 11211)])
    if mode == "toggle":
        # Immediate on/off of channels 1-3 or the PIR sensor (channel 4).
        if channel == "ch1":
            if state == "on":
                turnOn(1, mem)
            elif state == "off":
                turnOff(1, mem)
            return json.dumps({"id": "ch1"})
        elif channel == "ch2":
            if state == "on":
                turnOn(2, mem)
            elif state == "off":
                turnOff(2, mem)
            return json.dumps({"id": "ch2"})
        elif channel == "ch3":
            if state == "on":
                turnOn(3, mem)
            elif state == "off":
                turnOff(3, mem)
            return json.dumps({"id": "ch3"})
        elif channel == "pir":
            if state == "on":
                turnOn(4, mem)
            elif state == "off":
                turnOff(4, mem)
            return json.dumps({"id": "pir"})
    elif mode == "signal":
        # One-shot IR commands via LIRC's irsend; volume commands are sent
        # five times for a noticeable step. No JSON response for this mode.
        if channel == "volumeup":
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEUP')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEUP')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEUP')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEUP')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEUP')
        elif channel == "volumedown":
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEDOWN')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEDOWN')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEDOWN')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEDOWN')
            os.system('irsend SEND_ONCE rpi KEY_VOLUMEDOWN')
        elif channel == "receiveron":
            os.system('irsend SEND_ONCE rpi KEY_POWER')
        elif channel == "receiveroff":
            os.system('irsend SEND_ONCE rpi KEY_SLEEP')
        elif channel == "tv":
            os.system('irsend SEND_ONCE rpi KEY_POWER2')
    elif mode == "timer":
        # Schedule a channel to switch at ``time``; state "off" clears the
        # schedule by passing "0" as the time string.
        setTime = web.input()["time"].strip()
        if channel == "ch1on":
            if state == "on":
                turnOnTimer(1, setTime, mem)
                return json.dumps({"id": "ch1on", "result": "on"})
            elif state == "off":
                turnOnTimer(1, "0", mem)
                return json.dumps({"id": "ch1on", "result": "off"})
        elif channel == "ch2on":
            if state == "on":
                turnOnTimer(2, setTime, mem)
                return json.dumps({"id": "ch2on", "result": "on"})
            elif state == "off":
                turnOnTimer(2, "0", mem)
                return json.dumps({"id": "ch2on", "result": "off"})
        elif channel == "ch3on":
            if state == "on":
                turnOnTimer(3, setTime, mem)
                return json.dumps({"id": "ch3on", "result": "on"})
            elif state == "off":
                turnOnTimer(3, "0", mem)
                return json.dumps({"id": "ch3on", "result": "off"})
        elif channel == "piron":
            if state == "on":
                turnOnTimer(4, setTime, mem)
                return json.dumps({"id": "piron", "result": "on"})
            elif state == "off":
                turnOnTimer(4, "0", mem)
                return json.dumps({"id": "piron", "result": "off"})
        elif channel == "ch1off":
            if state == "on":
                turnOffTimer(1, setTime, mem)
                return json.dumps({"id": "ch1off", "result": "on"})
            elif state == "off":
                turnOffTimer(1, "0", mem)
                return json.dumps({"id": "ch1off", "result": "off"})
        elif channel == "ch2off":
            if state == "on":
                turnOffTimer(2, setTime, mem)
                return json.dumps({"id": "ch2off", "result": "on"})
            elif state == "off":
                turnOffTimer(2, "0", mem)
                return json.dumps({"id": "ch2off", "result": "off"})
        elif channel == "ch3off":
            if state == "on":
                turnOffTimer(3, setTime, mem)
                return json.dumps({"id": "ch3off", "result": "on"})
            elif state == "off":
                turnOffTimer(3, "0", mem)
                return json.dumps({"id": "ch3off", "result": "off"})
        elif channel == "piroff":
            if state == "on":
                turnOffTimer(4, setTime, mem)
                return json.dumps({"id": "piroff", "result": "on"})
            elif state == "off":
                turnOffTimer(4, "0", mem)
                return json.dumps({"id": "piroff", "result": "off"})
    elif mode == "check":
        # Dump every channel's current state from memcached.
        if DEBUG_ENABLED:
            print "\tSending all channel states"
        return json.dumps(memToDict(mem))
    #result = json.dumps("channel 1 complete")
    return None
class Env(object):
    """
    Load information about the execution environment.
    """

    # Root of the git repository (edx-platform)
    REPO_ROOT = path(__file__).abspath().parent.parent.parent

    # Reports Directory
    REPORT_DIR = REPO_ROOT / 'reports'
    METRICS_DIR = REPORT_DIR / 'metrics'

    # Python unittest dirs
    PYTHON_COVERAGERC = REPO_ROOT / ".coveragerc"

    # Bok_choy dirs
    BOK_CHOY_DIR = REPO_ROOT / "common" / "test" / "acceptance"
    BOK_CHOY_LOG_DIR = REPO_ROOT / "test_root" / "log"
    BOK_CHOY_REPORT_DIR = REPORT_DIR / "bok_choy"
    BOK_CHOY_COVERAGERC = BOK_CHOY_DIR / ".coveragerc"

    # If set, put reports for run in "unique" directories.
    # The main purpose of this is to ensure that the reports can be 'slurped'
    # in the main jenkins flow job without overwriting the reports from other
    # build steps. For local development/testing, this shouldn't be needed.
    if os.environ.get("SHARD", None):
        shard_str = "shard_{}".format(os.environ.get("SHARD"))
        BOK_CHOY_REPORT_DIR = BOK_CHOY_REPORT_DIR / shard_str
        BOK_CHOY_LOG_DIR = BOK_CHOY_LOG_DIR / shard_str

    # For the time being, stubs are used by both the bok-choy and lettuce acceptance tests
    # For this reason, the stubs package is currently located in the Django app called "terrain"
    # where other lettuce configuration is stored.
    BOK_CHOY_STUB_DIR = REPO_ROOT / "common" / "djangoapps" / "terrain"

    # Directory that videos are served from
    VIDEO_SOURCE_DIR = REPO_ROOT / "test_root" / "data" / "video"

    # Ports and log files for the LMS and Studio servers under test.
    BOK_CHOY_SERVERS = {
        'lms': {
            'port': 8003,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_lms.log"
        },
        'cms': {
            'port': 8031,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_studio.log"
        }
    }

    # Stub services started for acceptance tests: each entry gives the port,
    # the log file, and an optional config string passed to the stub.
    BOK_CHOY_STUBS = {
        'xqueue': {
            'port': 8040,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_xqueue.log",
            'config': 'register_submission_url=http://0.0.0.0:8041/test/register_submission',
        },
        'ora': {
            'port': 8041,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_ora.log",
            'config': '',
        },
        'comments': {
            'port': 4567,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_comments.log",
        },
        'video': {
            'port': 8777,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_video_sources.log",
            'config': "root_dir={}".format(VIDEO_SOURCE_DIR),
        },
        'youtube': {
            'port': 9080,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_youtube.log",
        },
        'edxnotes': {
            'port': 8042,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_edxnotes.log",
        }
    }

    # Mongo databases that will be dropped before/after the tests run
    BOK_CHOY_MONGO_DATABASE = "test"
    # NOTE(review): the client is created at class-definition (import) time;
    # python-memcached connects lazily, so import succeeds even if no
    # memcached daemon is listening -- confirm this is intended.
    BOK_CHOY_CACHE = memcache.Client(['0.0.0.0:11211'], debug=0)

    # Test Ids Directory
    TEST_DIR = REPO_ROOT / ".testids"

    # Files used to run each of the js test suites
    # TODO: Store this as a dict. Order seems to matter for some
    # reason. See issue TE-415.
    JS_TEST_ID_FILES = [
        REPO_ROOT / 'lms/static/js_test.yml',
        REPO_ROOT / 'lms/static/js_test_coffee.yml',
        REPO_ROOT / 'cms/static/js_test.yml',
        REPO_ROOT / 'cms/static/js_test_squire.yml',
        REPO_ROOT / 'common/lib/xmodule/xmodule/js/js_test.yml',
        REPO_ROOT / 'common/static/js_test.yml',
        REPO_ROOT / 'common/static/js_test_requirejs.yml',
    ]

    # Suite names, positionally matched to JS_TEST_ID_FILES above.
    JS_TEST_ID_KEYS = [
        'lms',
        'lms-coffee',
        'cms',
        'cms-squire',
        'xmodule',
        'common',
        'common-requirejs'
    ]

    JS_REPORT_DIR = REPORT_DIR / 'javascript'

    # Directories used for common/lib/ tests
    LIB_TEST_DIRS = []
    for item in (REPO_ROOT / "common/lib").listdir():
        if (REPO_ROOT / 'common/lib' / item).isdir():
            LIB_TEST_DIRS.append(path("common/lib") / item.basename())
    LIB_TEST_DIRS.append(path("pavelib/paver_tests"))

    # Directory for i18n test reports
    I18N_REPORT_DIR = REPORT_DIR / 'i18n'

    # Service variant (lms, cms, etc.) configured with an environment variable
    # We use this to determine which envs.json file to load.
    SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)

    # If service variant not configured in env, then pass the correct
    # environment for lms / cms
    if not SERVICE_VARIANT:  # this will intentionally catch "";
        if any(i in sys.argv[1:] for i in ('cms', 'studio')):
            SERVICE_VARIANT = 'cms'
        else:
            SERVICE_VARIANT = 'lms'

    @lazy
    def env_tokens(self):
        """
        Return a dict of environment settings.
        If we couldn't find the JSON file, issue a warning and return an
        empty dict.
        """
        # Find the env JSON file
        if self.SERVICE_VARIANT:
            env_path = self.REPO_ROOT.parent / "{service}.env.json".format(
                service=self.SERVICE_VARIANT)
        else:
            env_path = path("env.json").abspath()

        # If the file does not exist, here or one level up,
        # issue a warning and return an empty dict
        if not env_path.isfile():
            env_path = env_path.parent.parent / env_path.basename()
        if not env_path.isfile():
            print(
                "Warning: could not find environment JSON file "
                "at '{path}'".format(path=env_path),
                file=sys.stderr,
            )
            return dict()

        # Otherwise, load the file as JSON and return the resulting dict
        try:
            with open(env_path) as env_file:
                return json.load(env_file)
        except ValueError:
            # File exists but is not valid JSON: abort the build.
            print(
                "Error: Could not parse JSON "
                "in {path}".format(path=env_path),
                file=sys.stderr,
            )
            sys.exit(1)

    @lazy
    def feature_flags(self):
        """
        Return a dictionary of feature flags configured by the environment.
        """
        return self.env_tokens.get('FEATURES', dict())
def save(key, obj):
    """Serialize *obj* with jsonpickle and store it in local memcached
    under *key*."""
    client = memcache.Client(['127.0.0.1:11211'], debug=0)
    payload = jsonpickle.dumps(obj)
    client.set(key, payload)
def init_cache(cls):
    """Attach a shared memcached client (localhost:9999) to the class."""
    servers = ['127.0.0.1:9999']
    cls.cache = memcache.Client(servers, debug=0)
def test_memcached_data(host):
    """Round-trip a value through the memcached instance under test.

    ``host`` is the test-framework host fixture; it is unused here because
    the check talks to memcached directly over the network.
    """
    client = memcache.Client(["{0}:{1}".format('memcached.focal', 11211)])
    # 15-second TTL keeps test residue from piling up on the server.
    # (Removed dead local ``sample_obj``, which was never used.)
    client.set("foo", "bar", time=15)
    check = client.get("foo")
    assert check == "bar"
def memcached(self):
    """Return the memcached client, creating and memoizing it on first use."""
    if self._memcached is not None:
        return self._memcached
    # First call: build the client from the configured server address.
    self._memcached = memcache.Client([self.config.memcached], debug=0)
    return self._memcached
def __init__(self, mc=None):
    """Wrap an existing memcache client, or build a default local one.

    Any truthy ``mc`` is used as-is; otherwise a client for
    127.0.0.1:11211 is created.
    """
    if not mc:
        import memcache
        mc = memcache.Client(['127.0.0.1:11211'], debug=0)
    self.mc = mc
def setUp(self):
    # Create a client against the test server for this case.
    # NOTE(review): Client is called as (host, port) here, unlike the
    # python-memcached API which takes a list of "host:port" strings --
    # presumably a different memcache client library is in use; confirm.
    self.client = memcache.Client('127.0.0.1', self.port)
def test_connect_timeout2(self):
    """Connecting to an unreachable IP with connect_timeout=1 must raise
    socket.timeout."""
    # using connect timeout
    # NOTE(review): Client(host, port, ...) signature implies a custom
    # memcache client (python-memcached takes a server list) -- confirm.
    client = memcache.Client(self.unavailable_ip, 11211, connect_timeout=1)
    self.assertRaises(socket.timeout, client._connect)
    client.close()
def setUp(self):
    """Connect a fresh client to the local memcached before each test."""
    servers = ['localhost:11211']
    self.mc = memcache.Client(servers, debug=0)
try: import memcache except ImportError as e: USE_MEMCACHE = False LOG.debug("Memcached support disabled: '{0}'".format(e)) else: LOG.debug("Memcached support enabled") USE_MEMCACHE = True CACHE_TIMEOUT = 60 MEMCACHED_SERVERS = ["memcached.os-in-a-box:11211"] if USE_MEMCACHE and MEMCACHED_SERVERS is not None: try: MEMCACHED_CLIENT = memcache.Client( MEMCACHED_SERVERS, debug=0) LOG.debug("Memcached support enabled") except Exception as e: LOG.error("Error getting the memcache client: {0}".format(e)) MEMCACHED_CLIENT = None else: LOG.debug("Memcached support is disabled") MEMCACHED_CLIENT = None class IronicClientWrapper(object): MEMCACHE_PREFIX = "this_is_sparta" def __init__(self, **kwargs): self.ironic_client = ironic_client.get_client(
def __init__(self, config, logger):
    """Store config/logger and open a memcached client for the servers
    listed in ``config.MEMCACHED_CONFIG``."""
    self.config = config
    self.logger = logger
    self.mc = memcache.Client(config.MEMCACHED_CONFIG)
def connection(self):
    """Return the memcached connection, building and caching it lazily."""
    if self._connection is not None:
        return self._connection
    # First use: connect to the configured "host:port" address.
    self._connection = memcache.Client([self.full_addr], debug=0)
    return self._connection
def _create_client(self):
    """Instantiate the memcache client from ``self.settings``.

    Works on a copy so popping ``servers`` does not mutate the caller's
    settings; all remaining keys are forwarded as keyword arguments.
    """
    import memcache
    options = copy(self.settings)
    servers = options.pop('servers', ('localhost:11211', ))
    self.client = memcache.Client(servers, **options)
parser = argparse.ArgumentParser() parser.add_argument('-c', dest='config_file', help='path to config file', default='lg.cfg') args = parser.parse_args() app = Flask(__name__) app.config.from_pyfile(args.config_file) app.secret_key = app.config["SESSION_KEY"] app.debug = app.config["DEBUG"] file_handler = TimedRotatingFileHandler(filename=app.config["LOG_FILE"], when="midnight") file_handler.setLevel(getattr(logging, app.config["LOG_LEVEL"].upper())) app.logger.addHandler(file_handler) memcache_server = app.config.get("MEMCACHE_SERVER", "127.0.0.1:11211") memcache_expiration = int(app.config.get("MEMCACHE_EXPIRATION", "1296000")) # 15 days by default mc = memcache.Client([memcache_server]) def get_asn_from_as(n): asn_zone = app.config.get("ASN_ZONE", "asn.cymru.com") try: data = resolve("AS%s.%s" % (n, asn_zone) ,"TXT").replace("'","").replace('"','') except: return " "*5 return [ field.strip() for field in data.split("|") ] def add_links(text): """Browser a string and replace ipv4, ipv6, as number, with a whois link """ if type(text) in [str, unicode]:
def _connect(self):
    """Return a new memcache client for the servers listed in the global
    Config singleton."""
    import memcache
    servers = Config.getInstance().getMemcachedServers()
    return memcache.Client(servers)
def __init__(self):
    """Create the cache backend: a local memcached client with debug
    logging enabled."""
    servers = ['127.0.0.1:11211']
    self.cache = memcache.Client(servers, debug=True)
def _create_client(self):
    """Build a memcache client for the configured server URL(s)."""
    servers = self.url
    return memcache.Client(servers)
GPIO.setup(channel1Off, GPIO.OUT) GPIO.setup(channel2On, GPIO.OUT) GPIO.setup(channel2Off, GPIO.OUT) GPIO.setup(channel3On, GPIO.OUT) GPIO.setup(channel3Off, GPIO.OUT) GPIO.setup(pirOn, GPIO.OUT) GPIO.setup(pir_pin, GPIO.IN) print "done." print "Setting up Astral package ...", geo = Astral() SUNSET = geo['San Francisco'] print "done." print "Allocating memcache ...", mc = memcache.Client([('127.0.0.1', 11211)]) print "done." print "Initializing web application library ...", urls = ('/', 'tutorial') render = web.template.render('templates/') app = web.application(urls, globals()) print "done." def stringToDatetime(frontendTime): frontendTime = frontendTime.strip().split(":") hour = int(frontendTime[0]) minute = int(frontendTime[1]) frontendTime = datetime.time(hour, minute, 0, 0) backendTime = datetime.datetime.combine(datetime.datetime.today(),
#encoding: utf-8 """使用memcache做缓存文件""" import memcache cache = memcache.Client(['127.0.0.1:11211'],debug=True) def set(key,value,timeout=60): return cache.set(key,value,timeout) def get(key): return cache.get(key) def delete(key): return cache.delete(key)
def _get_metrics(self, server, port, tags, memcache): mc = None # client try: self.log.debug("Connecting to %s:%s tags:%s", server, port, tags) mc = memcache.Client(["%s:%d" % (server, port)]) raw_stats = mc.get_stats() assert len(raw_stats) == 1 and len( raw_stats[0]) == 2, "Malformed response: %s" % raw_stats # Access the dict stats = raw_stats[0][1] for metric in stats: # Check if metric is a gauge or rate if metric in self.GAUGES: our_metric = self.normalize(metric.lower(), 'memcache') self.gauge(our_metric, float(stats[metric]), tags=tags) # Tweak the name if it's a rate so that we don't use the exact # same metric name as the memcache documentation if metric in self.RATES: our_metric = self.normalize(metric.lower() + "_rate", 'memcache') self.rate(our_metric, float(stats[metric]), tags=tags) # calculate some metrics based on other metrics. # stats should be present, but wrap in try/except # and log an exception just in case. try: self.gauge( "memcache.get_hit_percent", 100.0 * float(stats["get_hits"]) / float(stats["cmd_get"]), tags=tags, ) except ZeroDivisionError: pass try: self.gauge( "memcache.fill_percent", 100.0 * float(stats["bytes"]) / float(stats["limit_maxbytes"]), tags=tags, ) except ZeroDivisionError: pass try: self.gauge( "memcache.avg_item_size", float(stats["bytes"]) / float(stats["curr_items"]), tags=tags, ) except ZeroDivisionError: pass except AssertionError: raise Exception( "Unable to retrieve stats from memcache instance: " + server + ":" + str(port) + ". Please check your configuration") if mc is not None: mc.disconnect_all() self.log.debug("Disconnected from memcached") del mc
def open(self):
    """Open the memcached client if one is not already connected.

    Raises:
        ImportError: if the memcache client cannot be created (most
            likely because python3-memcached is not installed). The
            original failure is attached as the exception's cause.
    """
    if not self.client:
        try:
            self.client = memcache.Client(self.config['servers'])
        except Exception as exc:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt propagate; chaining via ``from exc``
            # preserves the real failure instead of hiding it.
            raise ImportError('You must have python3-memcached installed.') from exc
def _get_memcache_connection(self):
    """Open a client to the memcached behind Django's default cache."""
    location = settings.CACHES['default']['LOCATION']
    return memcache.Client([location], debug=0)
import memcache import subprocess import json out = "memdump" host = "127.0.0.1" port = "11211" if "-f" in sys.argv: out = sys.argv[sys.argv.index("-f")+1] if "-h" in sys.argv: host = sys.argv[sys.argv.index("-h")+1] if "-p" in sys.argv: port = sys.argv[sys.argv.index("-p")+1] print "conecting..." hostport = host+":"+port s = memcache.Client([hostport]) print "checking keys..." output = subprocess.check_output("memcdump --servers="+hostport, shell=True) data= output.split('\n') lista = {} for key in data: if key != '': lista[key]=s.get(key) print "dumping "+key+"..." jsonfinal = json.dumps(lista, ensure_ascii=False) print "writing output..." fileWrite = open(out, "w") fileWrite.write(jsonfinal) fileWrite.close() print "memcache file created" print "finish dump"
def con_memcache():
    """Connect to the configured memcached and print a sample game-config
    value (smoke check for the 'memcachedb' configuration section)."""
    memcache_conf = getConfig('memcachedb')
    # Build the server list directly instead of eval()-ing a constructed
    # string literal: same result, but no execution of config-derived text.
    servers = ['%s:%s' % (memcache_conf['host'], memcache_conf['port'])]
    mc = memcache.Client(servers, debug=0)
    print(mc.get('GAMEAPP.24CAIPIAO.COM:temp:GAME_3850_GET_REWARD_CONFIG'))
def create_connection(self):
    """Create one memcached connection, enforcing the pool's cap.

    Raises:
        RuntimeError: if ``max_connections`` has already been reached.
    """
    at_capacity = self._num_connections >= self.max_connections
    if at_capacity:
        raise RuntimeError("Too many memcached connections")
    self._num_connections += 1
    return memcache.Client(*self.connection_args, **self.connection_kwargs)