def __init__(self, **kwargs):
    super().__init__(**kwargs)
    self._eventloop = IOLoop.current()
    self._key = generate_id(size=32).encode('utf-8')
    self._execution_lock = Lock(loop=self._eventloop.asyncio_loop)
    self._inner_state_lock = Lock(loop=self._eventloop.asyncio_loop)
    self._dep_tracker = DependencyTracker()
    self._exec_unit_container = ExecUnitContainer()
    self.formatter = DisplayFormatter()
    self.manager_ns = BuiltInManager()
    self.manager_ns.global_ns['__builtins__'] = builtin_mods.__dict__
    self.manager_ns.global_ns['show_graph'] = self._show_graph
    self._execution_ctx = Executor(self._exec_unit_container,
                                   manager_ns=self.manager_ns)
    self.KernelTB = ultratb.AutoFormattedTB(mode='Plain',
                                            color_scheme='LightBG',
                                            tb_offset=1,
                                            debugger_cls=None)
    self._execution_queue = Queue(loop=self._eventloop.asyncio_loop)
    builtin_mods.show_graph = self._show_graph
    # mapping from variable name (target id) to (request id, generator object)
    self._registered_generators = dict()
    self._eventloop.spawn_callback(self._execution_loop)
def __init__(self, items: Iterable[Item], *, retry_cnt: int = 2):
    self._items: Iterator[Item] = peekable(items)
    self._rescheduled_items: Set[Handle[Item]] = set()
    self._in_progress: Set[Handle[Item]] = set()

    # Synchronization primitives
    self._lock = Lock()
    self._new_items = Condition(lock=self._lock)
    self._eof = Condition(lock=self._lock)
def __init__(self, supervisor: Supervisor, options: BaseClient, misc: None) -> None:
    self._lock = Lock()
    self._bin: Optional[PurePath] = None
    self._proc: Optional[Process] = None
    self._cwd: Optional[PurePath] = None
    super().__init__(supervisor, options=options, misc=misc)
    go(supervisor.nvim, aw=self._install())
    go(supervisor.nvim, aw=self._poll())
async def add_browser(self,
                      pages: int = 1,
                      server: Optional[str] = None,
                      launch_options: Optional[Dict[str, Any]] = None) -> Browser:
    """Launch a new browser."""
    # Avoid a shared mutable default argument.
    launch_options = launch_options or {}
    if 'proxy' in launch_options:
        self.set_launch_args_proxy(launch_options)
    # Create the screenshot directory if the user wants screenshots.
    if launch_options.get('screenshot', False):
        self._set_screenshot_dir()
    # If a server address is provided, launch the browser on that server.
    if server:
        browser = await self._launch_remote_browser(server, launch_options)
    else:
        # Start a local browser.
        browser = await self._launch_local_browser(launch_options)
    # Save browser data.
    self.browsers[browser] = {
        'page_count': pages,
        'launch_options': launch_options,
        'server': server,
        'consec_errors': 0,
        'lock': Lock(),
        'id': str(uuid4())
    }
    # Add a callback invoked if the Chrome DevTools connection closes.
    browser._connection.setClosedCallback(self.__on_connection_close)
    # Add pages (tabs) to the new browser. A new browser has one page by
    # default, so add one less than the desired page count.
    for _ in range(pages - 1):
        await browser.newPage()
    for page in await browser.pages():
        await self._init_page(page)
    return browser
async def _new_worker(self) -> BrowserWorker:
    """Construct a new worker."""
    worker = BrowserWorker(
        disable_images=self.disable_images,
        garbage_collect_at=self.garbage_collect_at,
        launch_options=self.launch_options,
        max_consecutive_errors=self.max_consecutive_errors)
    # Add a lock attribute.
    worker.__lock = Lock()
    # Call member functions for any provided default worker configuration.
    if self.cookies:
        await worker.set_cookie(*self.cookies)
    if self.default_nav_timeout:
        worker.set_default_nav_timeout(self.default_nav_timeout)
    if self.ad_block:
        await worker.set_ad_block()
    if self.blocked_urls:
        await worker.set_blocked_urls(self.blocked_urls)
    if self.evaluate_on_new_doc:
        await worker.evaluate_on_new_doc(self.evaluate_on_new_doc)
    if self.redirect_blocking_enabled:
        await worker.set_redirect_blocking_enabled()
    if self.request_abort_types:
        await worker.set_request_abort_types(self.request_abort_types)
    return worker
def __init__(self, hostname: str, port: int) -> None:
    """Sample API Client."""
    self._hostname = hostname
    self._port = port
    # Ensure only one connection at a time is established, because the
    # inverter's firmware doesn't handle concurrent connections well at
    # the time of writing.
    self._connection_lock = Lock()
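The comment above implies that callers must hold `_connection_lock` for the full open-request-close lifetime of a connection. A minimal sketch of what such a request method might look like, using asyncio streams; the `async_get_data` method and the `b"status\n"` payload are hypothetical, not part of the original client:

import asyncio
from asyncio import Lock

class SampleApiClient:
    def __init__(self, hostname: str, port: int) -> None:
        self._hostname = hostname
        self._port = port
        self._connection_lock = Lock()

    async def async_get_data(self) -> bytes:
        # Hold the lock for the whole open-request-close cycle so only
        # one connection to the inverter exists at a time.
        async with self._connection_lock:
            reader, writer = await asyncio.open_connection(
                self._hostname, self._port)
            try:
                writer.write(b"status\n")  # hypothetical request payload
                await writer.drain()
                return await reader.read(1024)
            finally:
                writer.close()
                await writer.wait_closed()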
def __init__(self, bot: Bot, channel: TextChannel):
    self.bot = bot
    self.channel: TextChannel = channel
    self.players: List[Player] = []
    self.leavers: List[Player] = []
    self.orderings: Dict[Callable, MatchFinder] = {}
    self.temp_messages: List[Message] = []
    self.show_lobby_lock: Lock = Lock()
def __init__(self, max_traces: int = DEFAULT_MAX_TRACES,
             max_spans: int = DEFAULT_MAX_SPANS) -> None:
    self.__lock = Lock()
    self.__max_traces = max_traces
    self.__trace_ids: Deque[str] = deque([])
    self.__traces: Dict[str, Set[str]] = {}
    self.__spans: Dict[str, Span] = {}
    self.__span_ids_by_operation_names: Dict[str, Set[str]] = {}
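A store shaped like this would presumably take `__lock` around every mutation of the shared maps, and evict the oldest trace once `max_traces` is exceeded. A minimal self-contained sketch under those assumptions; the `add_span` method and the simplified `Span` stand-in are hypothetical, not part of the original:

import asyncio
from collections import deque
from dataclasses import dataclass

@dataclass(frozen=True)
class Span:  # minimal stand-in for the real Span type
    trace_id: str
    span_id: str

class TraceStore:
    def __init__(self, max_traces: int = 100) -> None:
        self.__lock = asyncio.Lock()
        self.__max_traces = max_traces
        self.__trace_ids: deque = deque()
        self.__traces: dict = {}
        self.__spans: dict = {}

    async def add_span(self, span: Span) -> None:
        # All mutations of the shared maps happen under the lock.
        async with self.__lock:
            if span.trace_id not in self.__traces:
                self.__trace_ids.append(span.trace_id)
                self.__traces[span.trace_id] = set()
                # Evict the oldest trace once over capacity.
                if len(self.__trace_ids) > self.__max_traces:
                    evicted = self.__trace_ids.popleft()
                    for span_id in self.__traces.pop(evicted, set()):
                        self.__spans.pop(span_id, None)
            self.__traces[span.trace_id].add(span.span_id)
            self.__spans[span.span_id] = span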
async def add_browser(self,
                      pages: int = 1,
                      server: Optional[str] = None,
                      launch_options: Optional[Dict[str, Any]] = None) -> Browser:
    """Launch a new browser."""
    # Avoid a shared mutable default argument (this function mutates
    # launch_options below).
    launch_options = launch_options or {}
    browser_data = {
        'page_count': pages,
        'launch_options': launch_options,
        'server': server,
        'consec_errors': 0,
        'lock': Lock(),
        'id': str(uuid4())
    }
    # security_error_frac_proxy_switch is the fraction of request history
    # that is security errors. Its value should be a tuple of two elements:
    # (max_security_error_count, request_history_buffer_size).
    max_sec_err_frac = launch_options.get('security_error_frac_proxy_switch')
    if max_sec_err_frac:
        browser_data['security_check_history'] = deque(
            maxlen=max_sec_err_frac[1])
    elif 'proxy' in launch_options:
        # Start the browser using a proxy server.
        if 'args' not in launch_options:
            launch_options['args'] = []
        # Add the proxy server to the launch options if it has not
        # already been added.
        if not any('--proxy-server' in arg for arg in launch_options['args']):
            launch_options['args'].append(
                f'--proxy-server="{launch_options["proxy"]}"')
    # Check whether the screenshot directory needs to be created.
    if launch_options.get('screenshot', False) and self.screenshot_dir is None:
        # Create the screenshot directory for this run.
        self.screenshot_dir = log_dir.joinpath(
            f"screenshots_{self.start_time.strftime('%Y-%m-%d_%H:%M:%S')}")
        self.screenshot_dir.mkdir()
    # Launch the browser on a server if a server address is provided.
    if server:
        logger.info(f"Launching remote browser on {server}: "
                    f"{pformat(launch_options)}")
        browser = await self._launch_remote_browser(server, launch_options)
    else:
        # Start a local browser.
        logger.info(f"Launching local browser: {pformat(launch_options)}")
        browser = await pyppeteer.launcher.launch(launch_options)
    # Save browser data.
    self.browsers[browser] = browser_data
    # Add a callback invoked if the Chrome DevTools connection closes.
    browser._connection.setClosedCallback(self.__on_connection_close)
    # Add pages (tabs) to the new browser. A new browser has one page by
    # default, so add one less than the desired page count.
    for _ in range(pages - 1):
        await browser.newPage()
    for page in await browser.pages():
        await self._init_page(page)
    return browser
def __init__(self, items: AsyncIterator[Item]):
    """
    :param items: the items to be iterated over
    """
    self._buffer: "asyncio.Queue[Item]" = asyncio.Queue(maxsize=1)
    self._incoming_finished = False
    self._buffer_task: Optional[asyncio.Task] = \
        asyncio.get_event_loop().create_task(self._fill_buffer(items))

    """The items scheduled for reassignment to another consumer"""
    self._rescheduled_items: Set[Handle[Item]] = set()

    """The items currently assigned to consumers"""
    self._in_progress: Set[Handle[Item]] = set()

    # Synchronization primitives
    self._lock = Lock()
    self._new_items = Condition(lock=self._lock)
    self._eof = Condition(lock=self._lock)
async def mock_start(self):
    self._conn = await aiomysql.connect(
        host=docker_faf_db_config['host'],
        port=docker_faf_db_config['port'],
        user=docker_faf_db_config['user'],
        password=docker_faf_db_config['password'],
        db=docker_faf_db_config['db'])
    await self._conn.begin()
    # aiomysql has threadsafety 1, but we still want to use a single
    # connection for rolling back everything
    self._lock = Lock()
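The matching teardown would presumably take the same lock before rolling back, so an in-flight query on the shared connection cannot race the rollback. A hedged sketch; the `mock_stop` name is an assumption, not part of the original:

async def mock_stop(self):
    # Serialize against any in-flight query on the shared connection,
    # then undo everything done since mock_start().
    async with self._lock:
        await self._conn.rollback()
        self._conn.close()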
def __init__(
    self,
    fp: Union[str, BytesIO] = "file.pkl",
    *,
    create_file: bool = True,
    autosave: bool = True,
    autoload: bool = True,
    loop: Union[bool, AbstractEventLoop] = False
):
    self._exists = True
    self._create_file = create_file
    self._autosave = autosave
    self._autoload = autoload
    self._cache = dict()

    if loop:
        # Autosave and autoload cannot be used in async mode
        self._autoload = False
        self._autosave = False
        self._lock = Lock()
        # Use the provided loop, or get the current event loop
        if isinstance(loop, AbstractEventLoop):
            self._loop = loop
        else:
            self._loop = get_event_loop()
    else:
        # Async mode is off
        self._loop = None
        self._lock = None

    # Test and set the file path. Exceptions will be raised if the user
    # doesn't have permission, the file path has a bad extension, or the
    # file does not exist and `create_file` is off.
    self._fp = self.filepath(fp)

    # Autoload checks the file's modified time, which is not compatible
    # with BytesIO.
    if isinstance(self._fp, BytesIO):
        self._autoload = False

    # Load initial data from the file into the cache.
    if self._loop:
        self._loop.create_task(self._async_load())
    else:
        self._load()
def __init__(self, **kwargs):
    super().__init__(**kwargs)
    self._eventloop = IOLoop.current()
    self._key = generate_id(size=32).encode('utf-8')
    self._execution_lock = Lock(loop=self._eventloop.asyncio_loop)
    self._inner_state_lock = Lock(loop=self._eventloop.asyncio_loop)
    self._dep_tracker = DependencyTracker()
    self._exec_unit_container = ExecUnitContainer()
    self.formatter = DisplayFormatter()
    self.ns_manager = BuiltInManager()
    self.initialize_builtins()
    self._execution_ctx = Executor(self._exec_unit_container,
                                   ns_manager=self.ns_manager)
    self.KernelTB = ultratb.AutoFormattedTB(mode='Plain',
                                            color_scheme='LightBG',
                                            tb_offset=1,
                                            debugger_cls=None)
    self._execution_queue = Queue(loop=self._eventloop.asyncio_loop)
    self._registered_generators = dict()
    self._eventloop.spawn_callback(self._execution_loop)
def __init__(self,
             maxCoroutineAmount: int = DefaultMaxCoroutinesAmount,
             maxCoroutineIdleTime: int = DefaultMaxCoroutineIdleTime,
             mostStop: bool = False,
             loop: BaseEventLoop = None):
    self._maxCoroutineAmount = maxCoroutineAmount
    self._maxCoroutineIdleTime = maxCoroutineIdleTime
    self._mostStop = mostStop
    self._lock = Lock()
    self._ready = []
    self._loop = loop
    self._stopEventLoopSingal = Event()
    self._stopEvent = None
    self._coroutinesCount = 0
async def __replaceMessage(self, type: str, embed: Embed, is_temp: bool = True):
    if type not in self.locks:
        self.locks[type] = Lock()
    if type not in self.temp_messages:
        self.temp_messages[type] = []
    async with self.locks[type]:
        try:
            ops = [self.__delMsgSafe(m) for m in self.temp_messages[type]]
            self.temp_messages[type] = []
            if len(ops) > 0:
                await asyncio.wait(ops)
            msg = await self.channel.send(embed=embed)
            if is_temp:
                self.temp_messages[type].append(msg)
        except BaseException as exception:
            await handle(self.channel, exception)
class MediaPlayer:
    logger: logging.Logger = logging.Logger("racetime-obs")
    enabled: bool = False
    triggers: List[MediaTrigger] = []
    chat_triggers: List[ChatTrigger] = []
    timers: List[Timer] = []
    triggers_lock: Lock = Lock()
    race_update_event: Event = Event()
    play_media_callback = None
    started_at: datetime = None
    ping_chat_messages: bool = False
    chat_media_file: str = None
    last_session_race: str = ""
    monitoring_type: int = 0

    def race_updated(self, race: Race, entrant_name: str):
        # Skip so the sound doesn't play when the user starts OBS next time.
        if self.last_session_race == race.name:
            return
        self.started_at = race.started_at
        for trigger in self.triggers:
            self.logger.debug(trigger)
            if trigger.check_trigger(race, race.get_entrant_by_name(entrant_name)):
                self.play_media_callback(trigger.media_file_path,
                                         self.monitoring_type)
                self.logger.debug("trigger fired")

    def chat_updated(self, message: ChatMessage):
        for trigger in self.chat_triggers:
            self.logger.debug(trigger)
            if trigger.check_trigger(message):
                self.play_media_callback(trigger.media_file_path,
                                         self.monitoring_type)

    def add_trigger(self, media_file_path: str, place_trigger: int = None,
                    entrant_count_trigger: int = None):
        async def add(media_file_path: str, place_trigger: int = None,
                      entrant_count_trigger: int = None):
            async with self.triggers_lock:
                self.triggers.append(
                    MediaTrigger(media_file_path,
                                 place_trigger=place_trigger,
                                 entrant_count_trigger=entrant_count_trigger))

        asyncio.ensure_future(
            add(media_file_path, place_trigger, entrant_count_trigger))

    def add_chat_trigger(self, media_file_path: str, highlight: bool = False,
                         is_bot: bool = False, is_system: bool = False,
                         message_plain_text: str = None):
        async def add(media_file_path, highlight, is_bot, is_system,
                      message_plain_text):
            async with self.triggers_lock:
                self.chat_triggers.append(
                    ChatTrigger(media_file_path, highlight, is_bot,
                                is_system, message_plain_text))

        asyncio.ensure_future(
            add(media_file_path, highlight, is_bot, is_system,
                message_plain_text))

    def add_timer(self, media_file_path: str, race_time: timedelta):
        # Try to wake up a little early and get ready.
        timer = Timer(self.time_to_start_play(race_time), self.timer_wake_up,
                      media_file_path, race_time)
        self.timers.append(timer)
        timer.start()

    def time_to_start_play(self, race_time: timedelta) -> float:
        time_to_start_play = 10000000000000.0
        return time_to_start_play

    async def timer_wake_up(self, media_file_path: str, race_time: timedelta):
        # The sleep must be awaited; the original call was a no-op coroutine.
        await asyncio.sleep(self.time_to_start_play(race_time))
        self.logger.debug(f"attempting to play {media_file_path} at "
                          f"{timer_to_str(race_time)}")
        asyncio.ensure_future(
            self.play_media_callback(media_file_path, self.monitoring_type))

    def remove_trigger(self, index: int):
        async def remove(index: int):
            async with self.triggers_lock:
                if len(self.triggers) > index:
                    self.triggers.remove(self.triggers[index])

        asyncio.ensure_future(remove(index))
def __init__(self, *args, **kwargs):
    super(BitmexMonitor, self).__init__(*args, **kwargs)
    self._instrument_books = collections.defaultdict(dict)  # TODO
    self._orderbooks = collections.defaultdict(dict)
    self._orderbooks_lock = Lock()
    self._orderbooks_use_snapshots = False
def __init__(self, directory: str, key: str):
    self.directory = directory
    self.key = load_pem_private_key(key.encode(), None, default_backend())
    self.lock = Lock()
    self.auth = jwk_auth(jwk(self.key.public_key()))
class SmartQueue(Generic[Item]):
    def __init__(self, items: AsyncIterator[Item]):
        """
        :param items: the items to be iterated over
        """
        self._buffer: "asyncio.Queue[Item]" = asyncio.Queue(maxsize=1)
        self._incoming_finished = False
        self._buffer_task: Optional[asyncio.Task] = \
            asyncio.get_event_loop().create_task(self._fill_buffer(items))

        """The items scheduled for reassignment to another consumer"""
        self._rescheduled_items: Set[Handle[Item]] = set()

        """The items currently assigned to consumers"""
        self._in_progress: Set[Handle[Item]] = set()

        # Synchronization primitives
        self._lock = Lock()
        self._new_items = Condition(lock=self._lock)
        self._eof = Condition(lock=self._lock)

    async def _fill_buffer(self, incoming: AsyncIterator[Item]):
        try:
            async for item in incoming:
                await self._buffer.put(item)
                async with self._lock:
                    self._new_items.notify_all()
            self._incoming_finished = True
            async with self._lock:
                self._eof.notify_all()
                self._new_items.notify_all()
        except asyncio.CancelledError:
            pass

    async def close(self):
        if self._buffer_task:
            self._buffer_task.cancel()
            await self._buffer_task
            self._buffer_task = None

    def finished(self):
        return (not self.has_unassigned_items()
                and not self._in_progress
                and self._incoming_finished)

    def has_unassigned_items(self) -> bool:
        """Check if this queue has a new or rescheduled item immediately available."""
        return bool(self._rescheduled_items) or bool(self._buffer.qsize())

    def new_consumer(self) -> "Consumer[Item]":
        return Consumer(self)

    def __find_rescheduled_item(self, consumer: "Consumer[Item]") -> Optional[Handle[Item]]:
        return next(
            (handle for handle in self._rescheduled_items
             if consumer not in handle._prev_consumers),
            None,
        )

    async def get(self, consumer: "Consumer[Item]") -> Handle[Item]:
        """Get a handle to the next item to be processed (either a new one or rescheduled)."""
        async with self._lock:
            while not self.finished():
                handle = self.__find_rescheduled_item(consumer)
                if handle:
                    self._rescheduled_items.remove(handle)
                    self._in_progress.add(handle)
                    handle.assign_consumer(consumer)
                    return handle

                if self._buffer.qsize():
                    next_elem = await self._buffer.get()
                    handle = Handle(next_elem, consumer=consumer)
                    self._in_progress.add(handle)
                    return handle

                await self._new_items.wait()
            self._new_items.notify_all()
        raise StopAsyncIteration

    async def mark_done(self, handle: Handle[Item]) -> None:
        """Mark an item, referred to by `handle`, as done."""
        assert handle in self._in_progress, "handle is not in progress"
        async with self._lock:
            self._in_progress.remove(handle)
            self._eof.notify_all()
            self._new_items.notify_all()
        if _logger.isEnabledFor(logging.DEBUG):
            stats = self.stats()
            _logger.debug("status: " + ", ".join(
                f"{key}: {val}" for key, val in stats.items()))

    async def reschedule(self, handle: Handle[Item]) -> None:
        """Free the item for reassignment to another consumer."""
        assert handle in self._in_progress, "handle is not in progress"
        async with self._lock:
            self._in_progress.remove(handle)
            self._rescheduled_items.add(handle)
            self._new_items.notify_all()

    async def reschedule_all(self, consumer: "Consumer[Item]"):
        """Make all items currently assigned to the consumer available for reassignment."""
        async with self._lock:
            handles = [handle for handle in self._in_progress
                       if handle.consumer == consumer]
            for handle in handles:
                self._in_progress.remove(handle)
                self._rescheduled_items.add(handle)
            self._new_items.notify_all()

    def stats(self) -> Dict:
        return {
            "locked": self._lock.locked(),
            "in progress": len(self._in_progress),
            "rescheduled": len(self._rescheduled_items),
            "in buffer": self._buffer.qsize(),
            "incoming finished": self._incoming_finished,
        }

    async def wait_until_done(self) -> None:
        """Wait until all items in the queue are processed."""
        async with self._lock:
            while not self.finished():
                await self._eof.wait()
def __init__(self) -> None:
    self.container: Dict[int, EmailMessage] = {}
    self.next_read_id = 0
    self.next_set_id = 0
    self.lock_getting = Lock()
    super().__init__()
import os
import re
from asyncio.locks import Lock
from xml.etree import ElementTree

from . import session

_ANDROID_REPO_MANIFESTS = {
    'addon': 'https://dl.google.com/android/repository/addon2-1.xml',
    'package': 'https://dl.google.com/android/repository/repository2-1.xml',
}

_repo_manifests_cache = {}
_repo_manifests_locks = {}
for repo in _ANDROID_REPO_MANIFESTS:
    _repo_manifests_locks[repo] = Lock()


async def _get_repo_manifest(repo):
    async with _repo_manifests_locks[repo]:
        if repo in _repo_manifests_cache:
            return _repo_manifests_cache[repo]
        repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]
        async with session.get(repo_xml_url) as res:
            data = (await res.read()).decode('utf-8')
        repo_manifest = ElementTree.fromstring(data)
        _repo_manifests_cache[repo] = repo_manifest
        return repo_manifest
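Because the lock is per repo, concurrent callers serialize on the first fetch and every later call is served from the cache. A minimal usage sketch, assuming the module above (with its `session` dependency) is importable and Python 3.10+, where `Lock()` no longer binds an event loop at construction:

import asyncio

async def main():
    # The two 'addon' calls share one lock: the first fetches and caches
    # the manifest, the second waits and then reads it from the cache.
    addon_a, addon_b, package = await asyncio.gather(
        _get_repo_manifest('addon'),
        _get_repo_manifest('addon'),
        _get_repo_manifest('package'),
    )
    assert addon_a is addon_b  # same cached ElementTree root

asyncio.run(main())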
def __init__(self, *args, **kwargs):
    super(OkexSpotMonitor, self).__init__(*args, **kwargs)
    self._orderbooks = collections.defaultdict(dict)
    self._orderbooks_lock = Lock()
    self._orderbooks_use_snapshots = False
import asyncio, async_timeout, time, re, json, os
import aiohttp
from bs4 import BeautifulSoup
from asyncio.locks import Lock

# List used to hold counters: downLoadCountNow (number downloaded so far),
# NoPicAnswer (answers with no pictures), nimingCount (number of anonymous
# answers), zhiHuUsers (duplicate Zhihu users)
variableList = [0, 0, 0, 0]
# Find picture URLs in the `content` field of the JSON
findPicUrl = re.compile(r"https://pic\d\.zhimg\.com/v\d-.{32}_.{1,2}\.jpg")
# Find the size-control characters in the URL
findSize = re.compile(r"_.{1,2}\.jpg")
# Get the lock
lock = Lock()


def checkStr(str):
    """
    Check whether a nickname contains characters that are illegal in Windows
    file names; any match is replaced with "_".
    :param str: the answerer's nickname
    :return: the filtered nickname
    """
    return re.sub(r"[\/\\\:\*\?\"\<\>\|]", "_", str)


async def DownLoadPic(session, qestionSavePath, offset, questionId, downLoadCount):
    """
    :param session: the established connection
from asyncio.locks import Lock

from candybot import exceptions, utils, commands
from candybot.interface import database
from candybot.engine import CandyValue, CandyDrop

# The current state of channels: Channel ID -> Candy Drop
STATE = {}
# Mutex lock for STATE
STATE_LOCK = Lock()


async def setup(guild):
    database.set_settings(guild, ".", 0.2, 3, 5, 10, 100)
    database.set_settings_candy_add(guild, "candy", "🍬", 1)


async def teardown(guild):
    database.teardown(guild)


async def handle_message(message):
    """
    Handles a Discord Message
    :param message: The Discord message
    """
    # Firstly, ignore the message if we don't care about it
    if not filter_message(message):
        return
class SmartQueue(Generic[Item], object):
    def __init__(self, items: Iterable[Item], *, retry_cnt: int = 2):
        self._items: Iterator[Item] = peekable(items)
        self._rescheduled_items: Set[Handle[Item]] = set()
        self._in_progress: Set[Handle[Item]] = set()

        # Synchronization primitives
        self._lock = Lock()
        self._new_items = Condition(lock=self._lock)
        self._eof = Condition(lock=self._lock)

    def has_new_items(self) -> bool:
        """Check whether this queue has any items that were not retrieved by any consumer yet."""
        return bool(self._items)

    def has_unassigned_items(self) -> bool:
        """Check whether this queue has any unassigned items.

        An item is _unassigned_ if it's new (hasn't been retrieved yet by any
        consumer) or it has been rescheduled and is not in progress. A queue
        has unassigned items iff `get()` will immediately return some item,
        without waiting for an item that is currently "in progress" to be
        rescheduled.
        """
        return self.has_new_items() or bool(self._rescheduled_items)

    def new_consumer(self) -> "Consumer[Item]":
        return Consumer(self)

    def __has_data(self):
        return self.has_unassigned_items() or bool(self._in_progress)

    def __find_rescheduled_item(self, consumer: "Consumer[Item]") -> Optional[Handle[Item]]:
        return next(
            (handle for handle in self._rescheduled_items
             if consumer not in handle._prev_consumers),
            None,
        )

    async def get(self, consumer: "Consumer[Item]") -> Handle[Item]:
        async with self._lock:
            while self.__has_data():
                handle = self.__find_rescheduled_item(consumer)
                if handle:
                    self._rescheduled_items.remove(handle)
                    self._in_progress.add(handle)
                    handle.assign_consumer(consumer)
                    return handle

                if self.has_new_items():
                    next_elem = next(self._items)
                    handle = Handle(next_elem, consumer=consumer)
                    self._in_progress.add(handle)
                    return handle

                await self._new_items.wait()
            self._new_items.notify_all()
        raise StopAsyncIteration

    async def mark_done(self, handle: Handle[Item]) -> None:
        assert handle in self._in_progress, "handle is not in progress"
        async with self._lock:
            self._in_progress.remove(handle)
            self._eof.notify_all()
            self._new_items.notify_all()
        if _logger.isEnabledFor(logging.DEBUG):
            _logger.debug(
                f"status in-progress={len(self._in_progress)}, have_item={bool(self._items)}"
            )

    async def reschedule(self, handle: Handle[Item]) -> None:
        assert handle in self._in_progress, "handle is not in progress"
        async with self._lock:
            self._in_progress.remove(handle)
            self._rescheduled_items.add(handle)
            self._new_items.notify_all()

    async def reschedule_all(self, consumer: "Consumer[Item]"):
        async with self._lock:
            handles = [handle for handle in self._in_progress
                       if handle.consumer == consumer]
            for handle in handles:
                self._in_progress.remove(handle)
                self._rescheduled_items.add(handle)
            self._new_items.notify_all()

    def stats(self) -> Dict:
        return {
            "locked": self._lock.locked(),
            "items": bool(self._items),
            "in-progress": len(self._in_progress),
            "rescheduled-items": len(self._rescheduled_items),
        }

    async def wait_until_done(self) -> None:
        async with self._lock:
            while self.__has_data():
                await self._eof.wait()
class Worker(BaseWorker[BaseClient, None]):
    def __init__(self, supervisor: Supervisor, options: BaseClient, misc: None) -> None:
        self._lock = Lock()
        self._bin: Optional[PurePath] = None
        self._proc: Optional[Process] = None
        self._cwd: Optional[PurePath] = None
        super().__init__(supervisor, options=options, misc=misc)
        go(supervisor.nvim, aw=self._install())
        go(supervisor.nvim, aw=self._poll())

    async def _poll(self) -> None:
        try:
            while True:
                await sleep(9)
        finally:
            proc = self._proc
            if proc:
                with suppress(ProcessLookupError):
                    proc.kill()
                await proc.wait()

    async def _install(self) -> None:
        vars_dir = self._supervisor.vars_dir / "clients" / "t9"
        bin_path = t9_bin(vars_dir)
        if access(bin_path, X_OK):
            self._bin = bin_path
        else:
            for _ in range(9):
                await sleep(0)
            await awrite(self._supervisor.nvim, LANG("begin T9 download"))

            self._bin = await ensure_updated(
                vars_dir,
                retries=self._supervisor.limits.download_retries,
                timeout=self._supervisor.limits.download_timeout,
            )

            if not self._bin:
                await awrite(self._supervisor.nvim, LANG("failed T9 download"))
            else:
                await awrite(self._supervisor.nvim, LANG("end T9 download"))

    async def _clean(self) -> None:
        proc = self._proc
        if proc:
            self._proc = None
            with suppress(ProcessLookupError):
                proc.kill()
            await proc.wait()

    async def _comm(self, cwd: PurePath, json: str) -> Optional[str]:
        async def cont() -> Optional[str]:
            async with self._lock:
                if self._bin and not self._proc:
                    self._proc = await _proc(self._bin, cwd=cwd)
                    if self._proc:
                        self._cwd = cwd
                if not self._proc:
                    return None
                else:
                    assert self._proc.stdin and self._proc.stdout
                    try:
                        self._proc.stdin.write(encode(json))
                        self._proc.stdin.write(b"\n")
                        await self._proc.stdin.drain()
                        out = await self._proc.stdout.readline()
                    except (ConnectionError, LimitOverrunError, ValueError):
                        return await self._clean()
                    else:
                        return decode(out)

        if self._lock.locked():
            return None
        else:
            return await shield(cont())

    async def work(self, context: Context) -> AsyncIterator[Completion]:
        if self._cwd != context.cwd:
            await self._clean()
        if self._bin:
            req = _encode(
                self._supervisor.match,
                context=context,
                limit=self._supervisor.match.max_results,
            )
            json = dumps(req, check_circular=False, ensure_ascii=False)
            reply = await self._comm(context.cwd, json=json)
            if reply:
                try:
                    resp = loads(reply)
                except JSONDecodeError as e:
                    log.warn("%s", e)
                else:
                    for comp in _decode(self._options, reply=resp):
                        yield comp
class DB:
    conn = {}
    redisObj = {}
    lock = Lock()

    def connectToDB(self):
        self.conn = mysql.connector.connect(
            host='localhost',
            password='',
            user='',
            database='mymav',
            client_flags=[ClientFlag.LOCAL_FILES])
        self.conn.autocommit = True
        self.redisObj = redis.Redis(
            host='localhost',  # redisautoscale.xle6db.0001.use1.cache.amazonaws.com
            port=6379,
            password='')

    def getenrolledcourses(self, userid):
        # Parameterized queries are used throughout instead of string
        # concatenation, to avoid SQL injection.
        query = ("SELECT `courseid`, `section`, `instructor` "
                 "FROM enrolledcourses WHERE `userid` = %s;")
        cursor = self.conn.cursor()
        cursor.execute(query, (userid,))
        l = []
        for i in cursor:
            l.append({'course': i[0], 'section': i[1], 'instructor': i[2]})
        return l

    def getcourse(self, course, instructor):
        stime = time.time()
        try:
            query = ("SELECT * FROM `mymav`.`classes` "
                     "WHERE `courseno` = %s OR `instructor` = %s;")
            cursor = self.conn.cursor()
            cursor.execute(query, (course, instructor))
        except Exception:
            return False
        l = []
        t = time.time() - stime
        l.append({'time': t})
        for i in cursor:
            l.append({
                'course': i[0], 'section': i[1], 'title': i[2], 'day': i[4],
                'instructor': i[3], 'start': i[5], 'end': i[6], 'max': i[7],
                'enrolled': i[8]
            })
        return l

    def validateLogin(self, uid, passw):
        query = "SELECT * FROM logindetails WHERE `id` = %s;"
        cursor = self.conn.cursor()
        cursor.execute(query, (uid,))
        l = list(cursor)
        if len(l) != 1:
            return 0
        if str(l[0][2]) != passw:
            return 1
        query = "SELECT name FROM users WHERE `userid` = %s;"
        cursor.execute(query, (uid,))
        l = list(cursor)
        self.userid = uid
        if self.redisObj.get(uid) is None:
            self.redisObj.set(uid, time.time())
        return l[0][0]

    def register(self, uid, password, name):
        query = "SELECT * FROM logindetails WHERE `id` = %s;"
        cursor = self.conn.cursor()
        cursor.execute(query, (uid,))
        l = list(cursor)
        if len(l) == 1:
            return 0  # user id already registered
        query2 = ("INSERT INTO `mymav`.`logindetails` (`id`, `password`) "
                  "VALUES (%s, %s);")
        cursor.execute(query2, (uid, password))
        self.conn.commit()
        query3 = ("INSERT INTO `mymav`.`users` (`userid`, `name`) "
                  "VALUES (%s, %s);")
        cursor.execute(query3, (uid, name))
        self.conn.commit()
        return 1

    def dropcourse(self, uid, course, instructor, section):
        self.conn.commit()
        query = ("DELETE FROM `mymav`.`enrolledcourses` "
                 "WHERE `userid` = %s AND `courseid` = %s;")
        cursor = self.conn.cursor()
        try:
            cursor.execute(query, (uid, course))
        except Exception:
            return False
        self.conn.commit()
        return True

    def getallcourses(self):
        self.conn.commit()
        query = 'SELECT * FROM `mymav`.`classes`;'
        cursor = self.conn.cursor()
        cursor.execute(query)
        l = []
        for i in cursor:
            l.append({
                'course': i[0], 'section': i[1], 'title': i[2], 'day': i[4],
                'instructor': i[3], 'start': i[5], 'end': i[6], 'max': i[7],
                'enrolled': i[8]
            })
        return l

    def enrollcourse(self, uid, course, instructor, section):
        query = ("SELECT * FROM `enrolledcourses` WHERE `userid` = %s "
                 "AND `courseid` = %s AND `section` = %s;")
        cursor = self.conn.cursor()
        try:
            cursor.execute(query, (uid, course, section))
        except Exception:
            return 0
        l = list(cursor)
        if len(l) > 0:
            return 1
        self.conn.commit()
        query = ("INSERT INTO `mymav`.`enrolledcourses` "
                 "(`userId`, `courseid`, `section`, `instructor`) "
                 "VALUES (%s, %s, %s, %s);")
        try:
            cursor.execute(query, (uid, course, section, instructor))
            self.conn.commit()
        except Exception:
            return 0
        return 2