async def handle_auto_start(self):
    """Start every autostart-flagged service that is native and not already running."""
    services = await self.should_start()
    logger.info("Autostarting services", items=services)
    for item in services:
        svc = await self.get(item)
        # skip running services and anything that is not a native image
        if svc.is_active() or not image_navigator.is_native(svc.name):
            continue
        await self.run_service(svc.name)
async def restart_container(self, name):
    """Restart the container with the given name, if present.

    Returns True unconditionally (kept for backward compatibility —
    callers cannot distinguish "restarted" from "not found").
    """
    conts = await self.containers(as_dict=True)
    # membership test directly on the dict; .keys() was redundant
    if name in conts:
        c = conts[name]
        logger.info(f"restarting container {c.name}")
        await c.restart()
    return True
async def logs_reader(self, docker, container: DockerContainer, channel: Channel, name, cid):
    """Stream multiplexed docker log frames from `container`, decode each
    frame and publish a LogRecord to `channel` until the subscription
    yields None (reader shutdown).
    """
    log_reader = container.logs
    subscriber = log_reader.subscribe()
    unixts = int(time())
    # NOTE(review): time() already returns seconds; dividing by 1000 makes
    # `since` a tiny value (~1970), i.e. effectively "all logs" — confirm intent.
    await scheduler.spawn(log_reader.run(since=int(unixts / 1000)))
    while True:
        log_record = await subscriber.get()
        # NOTE: `id` shadows the builtin id() inside this loop body
        ts, id = idgen.take()
        if log_record is None:
            logger.info('closing docker logs reader')
            break
        mv = memoryview(log_record)
        # docker stream frames carry an 8-byte header; anything shorter
        # cannot contain a header plus payload, so skip it
        if len(log_record) <= 8:
            logger.warn('small shit', len=len(log_record), b64val=b64encode(log_record).decode())
            continue
        # payload starts after the 8-byte header
        message = bytes(mv[8:]).decode('utf-8', 'replace')
        # header byte 0 is the stream id (stdout/stderr); logs_sources maps its str form
        source = logs_sources.get(str(mv[0]), '')
        # header bytes 4..8: big-endian payload length
        size = struct.unpack('>L', mv[4:8])[0]
        msg = LogRecord(id, ts, cid, name, source, size, message)
        await channel.publish(msg)
async def nonblock_func(seconds):
    """Sleep for roughly `seconds` in small non-blocking slices.

    Returns the number of ticks performed.
    """
    logger.info(f'starting nonblock sleep {seconds}')
    for c in count():
        # fix: use >= instead of == — when seconds / 0.05 is not an exact
        # integer the original equality never fired and the loop ran forever
        if c >= seconds / 0.05:
            return c
        await asyncio.sleep(0.045)
        logger.info('puk ', c=c)
async def remove_container(self, name):
    """Stop (if running) and remove the named container.

    Best-effort: any DockerError is logged and swallowed; always returns True.
    """
    # removing if running
    try:
        container = await self.get(name)
        if container:
            await container.fill()
            if container.state == 'running':
                container_autoremove = container.auto_removable()
                logger.info("Stopping container")
                await container.stop()
                # auto-removable containers delete themselves after stop,
                # so only delete explicitly when that flag is off
                if not container_autoremove:
                    await container.delete()
            else:
                await container.delete()
            # give the daemon a moment to finish the removal
            await asyncio.sleep(0.5)
            # try:
            #     await container.wait(condition="removed")
            # except DockerError as e:
            #     logger.debug('Docker 404 received on wait request')
            #     if e.status != 404:
            #         raise e
    except DockerError:
        logger.exception('container remove exc')
    return True
async def websocket_handler(request):
    """Serve a websocket connection, spawning a background sender job.

    Returns the prepared WebSocketResponse when the connection closes.
    """
    ws = web.WebSocketResponse()
    # fix: bind before the try block — if ws.prepare() raises, the original
    # hit a NameError on `sender` in the finally clause
    sender = None
    try:
        await ws.prepare(request)
        sender = await scheduler.spawn(ws_sender(ws))
        async for msg in ws:
            if msg.type == aiohttp.WSMsgType.TEXT:
                if msg.data == 'close':
                    await ws.close()
                else:
                    # MSG handler
                    pass
                    # await ws.send_str(msg.data + '/answer')
            elif msg.type == aiohttp.WSMsgType.ERROR:
                print('ws connection closed with exception %s' % ws.exception())
        print('websocket connection closed')
    except CancelledError:
        logger.info('CancelledError fetched')
    except Exception:
        logger.exception('ex')
    finally:
        if sender is not None:
            await sender.close()
    return ws
async def stop_container(self, name):
    """Stop the container with the given name, if present.

    Returns True unconditionally (kept for backward compatibility —
    callers cannot distinguish "stopped" from "not found").
    """
    conts = await self.containers(as_dict=True)
    # membership test directly on the dict; .keys() was redundant
    if name in conts:
        c = conts[name]
        logger.info(f"stopping container {c.name}")
        await c.stop()
    return True
async def user_by_phone(phone, **params):
    """
    Find user by number.

    Returns dict(uid=..., sess_no=...) for the first user whose phone
    matches and who has an active session; None otherwise.
    """
    logger.info('user_by_phone', phone=phone)
    for uid, user in state.users.items():
        if phone == user.phone and user.sess_no:
            return dict(uid=uid, sess_no=user.sess_no)
    # fix: the original logged the loop variable `uid` here, which raised
    # NameError when state.users was empty and logged an unrelated uid otherwise
    logger.warn('cant find associated session', phone=phone)
async def download_db():
    """Fetch the geo database via CMD when missing, then open it into state."""
    try:
        if not os.path.isfile(TRG):
            logger.info('downloading database. cmd: %s', CMD)
            # NOTE(review): CMD goes through the shell — acceptable for a
            # fixed constant; never format user input into it
            out = subprocess.call(CMD, shell=True)
            logger.info('download result %s', out)
        # presumably the db is opened whether or not it was just downloaded —
        # TODO confirm against the original indentation
        state['geodata'] = maxminddb.open_database(TRG)
    except Exception:
        logger.exception('download err')
async def init(uid, data, **params):
    """Redirect to the partner's init pixel; fall back to a blank pixel."""
    partner_name = data.get('p', None)
    partner = state.get_partner(partner_name)
    # guard clause: unknown partner (or no init link) gets the empty pixel
    if not (partner and partner.init):
        logger.warn('partner not found', p=partner_name)
        return response.pixel()
    logger.info('init: partner and init link found. redirecting')
    pix = partner.init.format(partner_id=uid)
    return response.redirect(pix)
async def main(key, data, **params):
    """ Handle zadarma validation webhook """
    logger.info('request', data=data, key=key)
    # zadarma sends zd_echo to verify the endpoint; echo it straight back
    zd_echo = data.pop('zd_echo', None)
    return response.data(zd_echo) if zd_echo else {}
def add_http_handler(handler, path, **kwargs):
    """Build GET+POST route definitions at `path`, dispatching through request_handler."""
    logger.info('Adding route', path=path)

    async def wrapper(request):
        return await request_handler(request, handler)

    # register the same wrapper for both verbs
    return [RouteDef(method, path, wrapper, kwargs) for method in ('GET', 'POST')]
async def call(name, method, **params):
    """
    Call service method
    Use timeout__ param to set RPC response timeout
    """
    logger.info(f'Calling method "{method}" with params "{params}"')
    res = await rpc.request(name, method, **params)
    # normalize band responses to plain dicts for the caller
    return res._asdict() if isinstance(res, BaseBandResponse) else res
async def welcome_notify(rec, num):
    """Send reminder number `num` to a joined user; kick and forget on the last one."""
    msg = dict(
        chat_id=rec.chat_id,
        text=settings.msg.notify[num].format(mention=rec.mention),
    )
    logger.info('msg', msg=msg)
    await send_msg(msg)
    # final phase: remove the user from the chat and drop their tracking entry
    if num == 3:
        await query_url('kickChatMember', dict(chat_id=rec.chat_id, user_id=rec.user_id))
        state.joined.pop(str(rec.user_id))
async def hello_msg(message):
    """Greet a new chat member and record them in state.joined for follow-ups."""
    user = message.new_chat_member or message['from']
    mention_ = mention(user)
    greeting = settings.msg.hello.format(mention=mention_)
    # phase -1 marks a freshly joined, not-yet-verified user
    state.joined[str(user.id)] = pdict(
        time=now(),
        chat_id=message.chat.id,
        user_id=user.id,
        mention=mention_,
        phase=-1,
    )
    msg = {'chat_id': message.chat['id'], 'text': greeting}
    logger.info('sending', msg=msg)
    await send_msg(msg)
async def init(uid, data, **params):
    """
    Will be redirected to partner `sync` location.
    Otherwise a pixel is returned and an error written to the logs.
    """
    partner_name = data.get('p', None)
    partner = state.get_partner(partner_name)
    if partner and partner.init:
        logger.info('init: partner and init link found. redirecting')
        return response.redirect(partner.init.format(partner_id=uid))
    logger.warn('partner not found', p=partner_name)
    return response.pixel()
async def worker():
    """Ensure the bot webhook points at WEBHOOK_URL, then run the status checker."""
    logger.info('isolve_status_bot has been started')
    # Get current webhook status
    webhook = await bot.get_webhook_info()
    # fix: informational message, was logged at error level
    logger.info(f'Old webhook: {webhook}')
    # If URL doesn't match the expected one, (re)install the webhook
    if webhook.url != WEBHOOK_URL:
        # fix: delete the *stale* webhook when one is set — the original
        # tested `not webhook.url` and so only "deleted" a non-existent hook
        if webhook.url:
            await bot.delete_webhook()
        # Set new URL for webhook
        await bot.set_webhook(WEBHOOK_URL)
    await status_checker()
async def startup():
    """ Load database on startup """
    try:
        if not os.path.isfile(settings.db_file):
            raise FileNotFoundError("db file not found")
        # MODE_BATCH | MODE_MEMORY: load the whole db into memory for lookups
        state.geodata = GeoLocator(settings.db_file, MODE_BATCH | MODE_MEMORY)
        logger.info(
            'DB version',
            dbver=state.geodata.get_db_version(),
            dbdate=state.geodata.get_db_date(),
        )
    except Exception:
        logger.exception('error while opening database file')
async def send_sms(to=None, msg=None, to_phone=None):
    """Send an SMS, rate-limited to 3 per recipient per day.

    `to_phone` is a legacy alias for `to`. Returns the gateway response,
    or {'error': ...} when the daily limit is reached.

    Raises Exception when recipient or message text is missing.
    """
    to = to or to_phone
    if not to or not msg:
        raise Exception('wrong params')  # fix: typo in error message ("wronds")
    logger.info(f'sending sms to {to}')
    # per-recipient, per-day counter key
    counter_key = to + str(date.today())
    curr_count = await redis.get(counter_key)
    curr_count = int(curr_count or 0)
    # hardcoded number looks like an internal test phone exempt from the limit
    if to != '79261244141' and curr_count >= 3:
        logger.info('too many requests')
        return {'error': 'too many requests'}
    # fix: increment the same key that is read above — the original
    # incremented bare `to`, so the counter read never grew and the
    # rate limit could not trigger
    await redis.increx(counter_key, 60 * 60 * 24)
    params = dict(sender=settings.sender, to=to, msg=msg)
    result = await api_call(settings.endpoint.format(**params))
    logger.debug('send_sms', p=params, r=result)
    return result
async def loader():
    """Open the maxmind database, then report cache stats every five minutes."""
    try:
        if not os.path.isfile(settings.db_file):
            raise FileNotFoundError("db file not found")
        state.db = maxminddb.open_database(settings.db_file)
        logger.info('DB loaded')
        for num in count():
            info = await cache_info()
            logger.info('cache stat', loop=num, info=info)
            await asyncio.sleep(60 * 5)
    except asyncio.CancelledError:
        # normal shutdown path
        pass
    except Exception:
        logger.exception('error while opening database file')
async def available_ports(self):
    """Return the set of ports in [start_port, end_port) that are neither
    occupied by a running container nor reserved.
    """
    # fix: renamed local (was `available_ports`, shadowing the method name)
    all_ports = set(range(self.start_port, self.end_port))
    conts = await self.containers(fullinfo=True)
    used_ports = set()
    for cont in conts:
        cports = cont.ports
        logger.info('container ports', cname=cont.name, cports=cports)
        if not cports:
            logger.warn('no ports', cname=cont.name, cports=cports)
        # set.update replaces the manual add-loop
        used_ports.update(cports)
    # fix: dropped pointless f-prefix on a placeholder-free string
    logger.info("ports used summary", used_ports=used_ports)
    return all_ports - used_ports - self.reserved_ports
async def enrich(key, **params):
    """
    Handle incoming calls.

    Looks up the user behind `phone` on call-start events and returns
    {'uid': ..., 'sess_no': ...}; {} in every other case.
    """
    if key in settings.use_keys:
        # fix: pop with a default — a request missing phone/event used to
        # raise KeyError instead of falling through to the empty response
        phone = params.pop('phone', None)
        event = params.pop('event', None)
        if phone and event and event == START_EVENT:
            user = await rpc.request(CTRACK, USER_BY_PHONE, phone=phone)
            if user:
                logger.info('user', u=user)
                uid = user.get('uid', None)
                sess_no = user.get('sess_no', None)
                if uid:
                    return {'uid': str(uid), 'sess_no': sess_no}
    return {}
async def done(uid, data, **params):
    """
    Receiving syncronization results and show pixel
    """
    pname = data.pop('p', None)
    pid = data.pop('pi', None)
    partner = state.get_partner(pname)
    user_id = data.pop('ui', None)
    # the uid baked into the redirect must equal the one the partner sends back
    if uid != user_id:
        logger.warn('user ids not equal')
        return response.pixel()
    if not (partner and pid and user_id):
        logger.warn('not enough params', p=pname, pid=pid, u=user_id)
        return response.pixel()
    logger.info('done: params given. saving', p=pname, pid=pid, u=user_id)
    await save_match(uid, pname, pid)
    return response.pixel()
async def sync(uid, data, **params):
    """
    Will be redirected to partner `done` location.
    If an error occurs a pixel is returned instead.
    """
    pname = data.pop('p', None)
    pid = data.pop('pi', None)
    partner = state.get_partner(pname)
    if partner and pid:
        logger.info('sync: partner found. saving match', p=pname)
        await save_match(uid, pname, pid)
        # only redirect when the partner has a sync pixel configured
        if partner.sync:
            logger.info('sync: sync pixel configured. redirecting')
            return response.redirect(partner.sync.format(partner_id=uid, user_id=pid))
    else:
        logger.warn('partner not found', p=pname)
    return response.pixel()
async def download_db():
    """Forward batched tracking events to the MP_TRACK_EP endpoint.

    Subscribes the redis pool to the 'any' channel, then loops forever,
    uploading whatever state.grab() returns about once per second.
    NOTE(review): despite its name this function uploads, not downloads —
    presumably a leftover name; confirm before renaming call sites.
    """
    while True:
        try:
            await app['rpool'].subscribe(app['mpsc'].channel('any'))
            logger.info('subscribed any')
            async with aiohttp.ClientSession() as session:
                while True:
                    batch = state.grab()
                    if len(batch):
                        enc = json.dumps(batch, ensure_ascii=False)
                        q = urllib.parse.urlencode(
                            {'data': base64.encodebytes(enc.encode())})
                        async with session.post(MP_TRACK_EP, data=q) as resp:
                            logger.info('uploading %s items. code %s',
                                        len(batch), resp.status)
                    # fix: sleep on every iteration, not only after an upload,
                    # so an empty buffer does not busy-spin the loop
                    await asyncio.sleep(1)
        except Exception:
            # resubscribe and restart the session on any failure
            logger.exception('err - root loop')
async def checker():
    """Fetch the proxy list and spawn bounded concurrent checks, forever."""
    scheduler = await aiojobs.create_scheduler(limit=CONCURRENT_CHECKS)
    while True:
        state.myloop()
        try:
            params = settings.proxy_checker
            headers = {"Authorization": params.auth}
            async with aiohttp.ClientSession(headers=headers) as s:
                async with s.get(params.list, timeout=5) as r:
                    for proxy in await r.json():
                        p = Prodict.from_dict(proxy)
                        await scheduler.spawn(chech(p, params))
        except Exception:
            logger.exception('err')
        # poll the scheduler's (private) job set until this round drains
        jobs = scheduler._jobs
        while True:
            await asyncio.sleep(0.1)
            if not jobs:
                logger.info('finished')
                break
async def run(name, **req_params):
    """
    Create image and run new container with service
    params:
    pos - string contains prefered coordinates, for example "2x3" (col x row)
    nocache - Set docker build option. By default nocache=false.
    auto_remove - Set docker build option.
    env -
    """
    # only native images can be run through this endpoint
    if not image_navigator.is_native(name):
        return 404
    logger.debug('Called api.run with', params=req_params)
    pos = ServicePostion.from_string(req_params.get('pos'))
    params = RunParams(pos=pos, build_opts=build_options_from_req(req_params))
    svc = await state.get(name, params=params)
    logger.info('request with params', params=params, srv_config=svc.config)
    # no_wait: kick off the service start and return immediately
    svc = await state.run_service(name, no_wait=True)
    return svc.full_state()
async def uploader():
    """Drain state.buffer to settings.endpoint in roughly one-second batches.

    Runs until the task is cancelled; transient errors restart the session
    after a 5-second backoff.
    """
    while True:
        try:
            async with aiohttp.ClientSession() as session:
                while True:
                    if len(state.buffer):
                        # snapshot-and-clear so new events can accumulate
                        # while this batch is in flight
                        buff = state.buffer.copy()
                        state.buffer.clear()
                        enc = ujson.dumps(buff, ensure_ascii=False)
                        q = urllib.parse.urlencode(
                            {'data': base64.encodebytes(enc.encode())})
                        async with session.post(settings.endpoint, data=q) as resp:
                            logger.info('uploading', items=len(buff), status=resp.status)
                    # fix: sleep on every iteration so an empty buffer
                    # does not busy-spin the loop
                    await asyncio.sleep(1)
        except asyncio.CancelledError:
            break
        except Exception:
            logger.exception('exc')
            await asyncio.sleep(5)
async def create_image(self, img, img_options):
    """Build a docker image for `img`, streaming build progress to the log.

    Returns `img` populated with the freshly built image data.
    Raises Exception if the build stream finishes without an image id.
    """
    logger.debug("Building image", n=img.name, io=img_options, path=img.path)
    async with img.create(img_options) as builder:
        progress = pdict()
        # NOTE: local `struct` shadows the stdlib struct module in this method
        struct = builder.struct()
        last = time()
        async for chunk in await self.dc.images.build(**struct):
            if isinstance(chunk, dict):
                chunk = pdict.from_dict(chunk)
                if chunk.aux:
                    # aux record carries the final image ID
                    struct.id = chunk.aux.ID
                    logger.debug('chunk', chunk=chunk)
                elif chunk.status and chunk.id:
                    # per-layer progress record; log at most once per second
                    progress[chunk.id] = chunk
                    if time() - last > 1:
                        logger.info("\nDocker build progress", progress=progress)
                        last = time()
                elif chunk.stream:
                    # logger.debug('chunk', chunk=chunk)
                    # build output lines look like "Step 3/7 : ..."
                    step = re.search(r'Step\s(\d+)\/(\d+)', chunk.stream)
                    if step:
                        logger.debug('Docker build step ', groups=step.groups())
                    else:
                        logger.debug('unknown chunk', chunk=chunk)
            else:
                logger.debug('unknown chunk type', type=type(chunk), chunk=chunk)
        # no aux record seen: the daemon never reported a built image
        if not struct.id:
            raise Exception('Build process not completed')
        logger.info('Docker image created', struct_id=struct.id)
        return img.set_data(await self.dc.images.get(img.name))
async def startup():
    """Run CMD to fetch the geo database, then load it and log its version."""
    try:
        logger.info('executing: %s', CMD)
        # NOTE(review): CMD goes through the shell — fine for a fixed constant
        out = subprocess.call(CMD, shell=True)
        logger.info('download result: %s', out)
        gl = GeoLocator(TRG, MODE_BATCH | MODE_MEMORY)
        state['geodata'] = gl
        logger.info('DB version %s (%s)', gl.get_db_version(), gl.get_db_date())
    except Exception:
        logger.exception('download err')