def __init__(self, database_file):
    """Open the SQLite database at *database_file* and prepare the connection.

    Also creates the RLock used to serialise access to the connection.
    """
    self.lock = lock.RLock()
    logger.info("Loading database from %s.", database_file)
    self.connection = sqlite3.connect(database_file)
    # Return rows as sqlite3.Row so columns are addressable by name.
    self.connection.row_factory = sqlite3.Row
    # sqlite3.OptimizedUnicode was merely an alias for str, deprecated since
    # Python 3.3 and removed in 3.12 -- use str directly for the same
    # behavior on all supported versions.
    self.connection.text_factory = str
def __init__(self):
    """Set up empty key-tracking lists and last-error state."""
    # Keys removed or abandoned during processing.
    self.deleted_keys, self.aborted_keys = [], []
    # Last exception seen.  Some code paths test self.exc and then use it,
    # which is a check-then-act sequence that can race -- the RLock below
    # guards exactly that.
    self.exc = None
    self._exc_protect = lock.RLock()
def __init__(self, on_job_executed_cb=None):
    """Initialise scheduler state.

    *on_job_executed_cb*, if given, is invoked after each job executes.
    """
    self.on_job_executed_cb = on_job_executed_cb
    # Runtime flags.
    self.keep_running = True
    self.ready = False
    self.sleep_time = 0.1  # seconds between scheduler iterations
    # Job bookkeeping: the set of known jobs and the greenlet running each.
    self.jobs = set()
    self.job_greenlets = {}
    self.lock = lock.RLock()
    # Optional per-iteration callback and its arguments.
    self.iter_cb = None
    self.iter_cb_args = ()
def __init__(self, engine, gm):
    """Bind this cache to *engine* and the given GM; DB connection is deferred."""
    # The GM may require its own lock before we touch any of its data.
    gm.makeLock()
    self.engine = engine
    self.url = gm.url
    self.db_path = engine.paths.getDatabasePath(gm.url)
    self.lock = lock.RLock()
    self.games = {}
    # Populated later by connect_db(), which must run outside a db_session.
    self.db = None
class ConfigManager(object):
    """Process-wide singleton holder for the Config object."""

    config_obj = None
    _lock = lock.RLock()

    @classmethod
    def get_instance(cls, fname="config.yaml"):
        """Return the shared Config, creating it from *fname* on first use.

        Double-checked locking: the common already-created path returns
        without taking the lock; creation is re-checked under the lock so
        only one caller ever constructs the Config.
        """
        if cls.config_obj:
            return cls.config_obj
        with cls._lock:
            if not cls.config_obj:
                cls.config_obj = Config(fname)
        return cls.config_obj
def _makeOne(self):
    """Create the lock under test without setting the current hub first.

    Leaving the hub unset is deliberate: a careless implementation lets the
    background thread capture the hub, ask it to switch back to itself and
    then switch to the hub, which raises LoopExit (nothing left for the
    background thread to do).  A correct implementation notices it is the
    background thread, starts an async watcher and only then switches to
    the hub.  Not setting the hub here exercises exactly that race.
    """
    return lock.RLock()
def __init__(self, engine, parent, name, color, is_gm):
    """Register a connected player: identity, login stats and WS dispatch table.

    engine  -- application engine (logging, IP/geo helpers)
    parent  -- parent cache object (supplies ids and all event handlers)
    name    -- player display name
    color   -- player color
    is_gm   -- whether this player is the GM
    """
    # Global count of player caches ever created; also logged in the stats.
    PlayerCache.instance_count += 1

    self.engine = engine
    self.parent = parent  # parent cache object
    self.name = name
    self.color = color
    self.uuid = uuid.uuid4().hex  # used for HTML DOM id
    self.selected = list()
    self.index = parent.getNextId()  # used for ordering players in the UI
    self.is_gm = is_gm  # whether this player is the GM or not
    self.timeid = time.time()  # NOTE: currently not used but could be useful later

    self.greenlet = None

    # Fetch country flag from the client IP.
    # '?' = localhost, 'unknown' = unittest -- no flag in either case.
    self.ip = self.engine.getClientIp(request)
    self.country = self.engine.getCountryFromIp(self.ip)
    self.flag = flag.flag(
        self.country) if self.country not in ['?', 'unknown'] else ''

    # Record this login in the engine's stats log (JSON-encoded).
    login_data = [
        self.is_gm, time.time(), self.country, self.ip,
        PlayerCache.instance_count
    ]
    self.engine.logging.stats(json.dumps(login_data))

    self.lock = lock.RLock()
    self.socket = None  # assigned once the websocket connects

    # Maps incoming websocket opcodes to handlers on the parent cache.
    self.dispatch_map = {
        'PING': self.parent.onPing,
        'ROLL': self.parent.onRoll,
        'SELECT': self.parent.onSelect,
        'RANGE': self.parent.onRange,
        'ORDER': self.parent.onOrder,
        'UPDATE': self.parent.onUpdateToken,
        'CREATE': self.parent.onCreateToken,
        'CLONE': self.parent.onCloneToken,
        'DELETE': self.parent.onDeleteToken,
        'BEACON': self.parent.onBeacon,
        'MUSIC': self.parent.onMusic,
        'GM-CREATE': self.parent.onCreateScene,
        'GM-MOVE': self.parent.onMoveScene,
        'GM-ACTIVATE': self.parent.onActivateScene,
        'GM-CLONE': self.parent.onCloneScene,
        'GM-DELETE': self.parent.onDeleteScene
    }
class CouchManager(object):
    """Singleton access point for the CouchDB connection."""

    couch_obj = None
    _lock = lock.RLock()

    @classmethod
    def get_instance(cls):
        """Return the shared CouchDB connection, creating it lazily."""
        if cls.couch_obj:
            return cls.couch_obj
        with cls._lock:
            # Re-check under the lock: another caller may have won the race.
            if not cls.couch_obj:
                cls.couch_obj = CouchProperties().conn()
        return cls.couch_obj

    @staticmethod
    def formdb_instance():
        """Return the form database from the shared connection."""
        return CouchManager.get_instance()[CouchInfo.couch_formdb]
def run_receive(self, hostname, segment_size, num_segments, filename):
    '''
    The main thread which will manage the file transfer with a client.

    Blocks waiting to accept the client connection after listening on
    ports.  Once connected, starts reading & applying segments.
    '''
    logger.debug(
        "Starting receive thread. Accept connection from client: %s, for file: %s",
        hostname, filename)
    # Initialise the per-host queue if not already done.  The original code
    # created a brand-new RLock on every call and acquired it -- each caller
    # held a *different* lock, so it provided no mutual exclusion at all
    # (and the release was not in a finally block).  dict.setdefault is
    # atomic under the GIL and gives the intended create-once semantics.
    self.host_queues.setdefault(hostname, queue.Queue())
    self.receive(hostname, segment_size, num_segments, filename)
    return
def __init__(self, engine, parent, game):
    """Create the per-game cache and precompute MD5 hashes for its images."""
    # Hash up front so checksums can be served to clients immediately.
    num_generated = game.makeMd5s()
    self.engine = engine
    self.parent = parent
    self.url = game.url
    self.lock = lock.RLock()
    self.players = {}  # player name -> player cache
    self.next_id = 0   # used for player indexing in the UI
    self.playback = None
    if num_generated > 0:
        self.engine.logging.info('{0} MD5 hashes generated'.format(num_generated))
def __init__(self, config, api):
    """Initialise the scheduler from *config*, keeping a handle on *api*."""
    self.config = config
    self.api = api
    # Callbacks and job sources supplied by the configuration object.
    self.on_job_executed_cb = config.on_job_executed_cb
    self.startup_jobs = config.startup_jobs
    self.odb = config.odb
    self._add_startup_jobs = config._add_startup_jobs
    self._add_scheduler_jobs = config._add_scheduler_jobs
    # Runtime state.
    self.keep_running = True
    self.ready = False
    self.sleep_time = 0.1  # seconds between scheduler iterations
    self.jobs = set()
    self.job_greenlets = {}
    self.lock = lock.RLock()
    self.iter_cb = None    # optional per-iteration callback
    self.iter_cb_args = ()
    # Bound logger method matching the configured level for job events.
    self.job_log = getattr(logger, config.job_log_level)
def __init__(self, engine):
    """Build the engine-wide cache: load every GM, then connect their DBs."""
    self.engine = engine
    self.lock = lock.RLock()
    self.gms = {}
    log = self.engine.logging
    # Insert every GM found in the main database into the cache.
    with db_session:
        all_gms = self.engine.main_db.GM.select()
        total = len(all_gms)
        for n, gm in enumerate(all_gms, start=1):
            log.info('Creating GM {0}/{1} #{2}'.format(n, total, gm.url))
            self.insert(gm)
    # Open each GM's own database (outside the db_session above).
    for n, url in enumerate(self.gms, start=1):
        self.gms[url].connect_db()
        log.info('Loaded GM {0}/{1} #{2}'.format(n, len(self.gms), url))
    log.info('EngineCache created')
class ConfigManager(object):
    """Singleton holder for the parsed YAML configuration mapping."""

    config_obj = None
    _lock = lock.RLock()

    @classmethod
    def load(cls):
        """Parse conf/config.yaml (relative to this package) and return the mapping."""
        default = os.path.join(
            path.dirname(path.dirname(path.abspath(__file__))),
            os.path.pardir, 'conf/config.yaml')
        # The original leaked the file handle (open() was never closed) and
        # called yaml.load() without a Loader -- which can construct
        # arbitrary Python objects from tagged input and is a hard error in
        # PyYAML >= 6.  A context manager plus safe_load fixes both; plain
        # config files contain no python tags, so output is unchanged.
        with open(default) as fh:
            dataMap = yaml.safe_load(fh)
        return dataMap

    @classmethod
    def get_instance(cls):
        """Return the shared configuration, loading it on first use.

        Double-checked locking: the load is re-checked under the lock so
        only one caller ever parses the file.
        """
        if not cls.config_obj:
            with cls._lock:
                if not cls.config_obj:
                    cls.config_obj = cls.load()
        return cls.config_obj
def __init__(self, card):
    """Initialise acquisition state for the given hardware *card*."""
    self._log = logging.getLogger(type(self).__name__)
    self._card = card
    # Raw acquisition buffer plus the lock guarding access to it.
    self.__buffer = []
    self.__buffer_lock = lock.RLock()
    # Acquisition configuration: mode, status, timing and channel set.
    self.__acq_mode = self.DefaultAcqMode
    self.__acq_status = AcqStatus.Ready
    self.__acq_expo_time = 1.0
    self.__acq_point_period = None
    self.__acq_nb_points = 1
    self.__acq_channels = ()
    self.__timer_freq = 12.5E6
    # Runtime bookkeeping.
    self.__event_loop = None
    self.__trigger_on_start = True
    self.__soft_started = False
    self.__last_point_nb = -1
    self.__last_error = None
    # Copies, so per-instance edits never mutate the class-level defaults.
    self.input_config = dict(self.DefaultInputConfig)
    self.output_config = dict(self.DefaultOutputConfig)
#@:author:九世 #@:time:2019/7/31 #@:file:found.py from gevent import monkey monkey.patch_all() from multiprocessing import Process from gevent import lock import warnings import os import re import gevent Rlock = lock.RLock() warnings.filterwarnings("ignore") class Found: def __init__(self): self.file = 'save.txt' self.calc = 0 self.djcs = [] self.xcs = [] def request(self, url): command = 'ping.exe -n 1 {}'.format(url) zx = os.popen(command) jg = zx.read() host = re.search( '(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)\.(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)\.(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)\.(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)', jg)
import os
import re
import shutil
from subprocess import call, check_output

from gevent import monkey, lock, spawn
# Patch blocking stdlib modules for gevent, but leave real threads alone
# (thread=False) so the subsequent Django imports keep working.  Must run
# before zerorpc/django are imported below.
monkey.patch_all(socket=True, dns=True, time=True, select=True, thread=False,
                 os=True, ssl=True, httplib=False, aggressive=True)

import zerorpc
from django.core.management.base import BaseCommand, CommandError
from django.shortcuts import get_object_or_404
from django.conf import settings
from django.db import connection

from web.models import VHost, SSLCert, DefaultVHost
from .update_vhosts import update_vhosts
from .update_cronjobs import update_cronjobs

# Serialises vhost/cronjob regeneration so concurrent RPC calls never
# interleave their updates.
update_lock = lock.RLock()


def locked_update_vhosts():
    # Regenerate vhost configuration while holding the update lock.
    with update_lock:
        update_vhosts()


def locked_update_cronjobs():
    # Regenerate cron jobs while holding the update lock.
    with update_lock:
        update_cronjobs()


class Backend(object):
    def update_vhosts(self):
        # Close the current DB connection before spawning so the greenlet
        # opens a fresh one of its own.
        connection.close()
        spawn(locked_update_vhosts)

    def update_cronjobs(self):
        # NOTE(review): chunk appears truncated here -- by symmetry with
        # update_vhosts this likely spawns locked_update_cronjobs next.
        connection.close()
def makeLock(self):
    """Register a fresh RLock for this object's URL in the engine's lock table."""
    engine.locks[self.url] = lock.RLock()