class LRUProxyDict(MutableMapping):
    """A MutableMapping that fronts a (possibly slow) backing mapping with an LRU cache.

    Reads are served from the cache when possible and fall through to the
    proxied mapping on a miss; writes and deletes are applied to both layers.

    NOTE: values that are ``None`` cannot be distinguished from "missing"
    because lookups use ``get(key, None)`` internally.
    """

    def __init__(self, proxy, *args, **kwargs):
        # `proxy` is the authoritative backing store; **kwargs configure the
        # LRUCache (e.g. maxsize).
        self._proxy = proxy
        self._cache = LRUCache(**kwargs)

    def __contains__(self, item):
        return item in self._cache or item in self._proxy

    def __getitem__(self, item):
        if item is None:
            raise ValueError("None key")
        v = self._cache.get(item, None)
        if v is not None:
            return v
        v = self._proxy.get(item, None)
        if v is not None:
            self._cache[item] = v
            return v
        # Bug fix: the MutableMapping contract requires KeyError for missing
        # keys (the inherited .get()/.pop()/.setdefault() rely on it);
        # previously this silently returned None.
        raise KeyError(item)

    def __setitem__(self, key, value):
        # Write through to the backing store first so the cache never holds
        # a value the proxy does not.
        self._proxy[key] = value
        self._cache[key] = value

    def __delitem__(self, key):
        # Lenient delete: a missing key is ignored in both layers.
        self._proxy.pop(key, None)
        self._cache.pop(key, None)

    def __iter__(self):
        return self._proxy.__iter__()

    def __len__(self):
        return len(self._proxy)
class CachedFeatureFlagStore(AbstractFeatureFlagStore):
    """Wraps another feature-flag store with an in-memory cache.

    Reads hit the cache first and fall back to the wrapped store; every
    mutation is forwarded to the wrapped store and the cached entry is
    refreshed from it so the two stay coherent.
    """

    def __init__(
        self,
        store: AbstractFeatureFlagStore,
        size: int = DEFAULT_SIZE,
        ttl: Optional[int] = None,
    ) -> None:
        # Expiring cache when a TTL is requested, plain LRU otherwise.
        self._cache = LRUCache(size) if ttl is None else TTLCache(size, ttl)
        self._store = store
        self._ttl = ttl

    def create(
        self,
        feature_name: str,
        is_enabled: bool = False,
        client_data: Optional[dict] = None,
    ) -> FeatureFlagStoreItem:
        """Create the flag in the wrapped store and prime the cache with it."""
        created = self._store.create(
            feature_name, is_enabled=is_enabled, client_data=client_data
        )
        self._cache[feature_name] = created
        return created

    def get(self, feature_name: str) -> Optional[FeatureFlagStoreItem]:
        """Return the cached item, falling back to the wrapped store on a miss."""
        try:
            return self._cache[feature_name]
        except KeyError:
            pass
        fetched = self._store.get(feature_name)
        self._cache[feature_name] = fetched
        return fetched

    def set(self, feature_name: str, is_enabled: bool):
        """Update the flag's enabled state and refresh the cached entry."""
        self._store.set(feature_name, is_enabled)
        self._refresh(feature_name)

    def delete(self, feature_name: str):
        """Remove the flag from the wrapped store and evict it from the cache."""
        self._store.delete(feature_name)
        self._cache.pop(feature_name, None)

    def list(
        self, limit: Optional[int] = None, offset: int = 0
    ) -> Iterator[FeatureFlagStoreItem]:
        """Pass listing straight through to the wrapped store (not cached)."""
        return self._store.list(limit=limit, offset=offset)

    def set_meta(self, feature_name: str, meta: FeatureFlagStoreMeta):
        """Update the flag's metadata and refresh the cached entry."""
        self._store.set_meta(feature_name, meta)
        self._refresh(feature_name)

    def _refresh(self, feature_name: str) -> None:
        # Re-read from the authoritative store after a mutation so the cache
        # never serves a stale item.
        self._cache[feature_name] = self._store.get(feature_name)
class _DefaultROIManager(ROIManager, QObject):
    """Default ROIManager implementation that caches loaded ROI files in memory
    and emits Qt signals when ROIs are created, updated or removed."""

    def __init__(self, parent: QObject = None):
        super().__init__(parent=parent)
        self._cache = LRUCache(maxsize=2048)  # Store this many ROIs at once

    @staticmethod
    def _getCacheKey(roiFile: pwsdt.RoiFile):
        # (containing directory, name, number) uniquely identifies an ROI file.
        # NOTE(review): this must line up with the key used by `getROI`
        # (acq.filePath, name, number) — assumes roiFile.filePath lives
        # directly inside the acquisition directory; confirm.
        return os.path.split(roiFile.filePath)[0], roiFile.name, roiFile.number

    def removeRoi(self, roiFile: pwsdt.RoiFile):
        """Delete `roiFile` from disk, evict it from the cache and notify listeners."""
        # Bug fix: supply a default so removing an ROI that was never cached
        # (e.g. not loaded through getROI) does not raise KeyError.
        self._cache.pop(self._getCacheKey(roiFile), None)
        roiFile.delete()
        self.roiRemoved.emit(roiFile)

    def updateRoi(self, roiFile: pwsdt.RoiFile, roi: pwsdt.Roi):
        """Overwrite the contents of `roiFile` with `roi` and refresh the cache."""
        roiFile.update(roi)
        self._cache[self._getCacheKey(roiFile)] = roiFile
        self.roiUpdated.emit(roiFile)

    def createRoi(self, acq: pwsdt.Acquisition, roi: pwsdt.Roi, roiName: str,
                  roiNumber: int, overwrite: bool = False) -> pwsdt.RoiFile:
        """
        Args:
            acq: The acquisition to save the ROI to
            roi: The ROI to save.
            roiName: The name to save the ROI as.
            roiNumber: The number to save the ROI as.
            overwrite: Whether to overwrite existing ROIs with conflicting name/number combo.

        Returns:
            A reference to the created ROIFile

        Raises:
            OSError: If `overwrite` is false and an ROIFile for this name and number already exists.
        """
        # `acq.saveRoi` raises OSError itself in the documented case; the old
        # `except OSError as e: raise e` wrapper was a no-op and was removed.
        roiFile = acq.saveRoi(roiName, roiNumber, roi, overwrite=overwrite)
        self._cache[self._getCacheKey(roiFile)] = roiFile
        self.roiCreated.emit(roiFile, overwrite)
        return roiFile

    @cachedmethod(lambda self: self._cache,
                  key=lambda acq, roiName, roiNum: (acq.filePath, roiName, roiNum))  # Cache results
    def getROI(self, acq: pwsdt.Acquisition, roiName: str, roiNum: int) -> pwsdt.RoiFile:
        """Load an ROI file from disk, serving repeated lookups from the cache."""
        return acq.loadRoi(roiName, roiNum)

    def close(self):
        """Drop all cached ROI files."""
        self._cache.clear()
class HpfeedsLogger:
    """
    Log full complete sessions to hpfeeds.
    """

    def __init__(self, host, port, ident, secret, ssl=None):
        # Bug fix: the previous `super().__init__(*args, **kwargs)` referenced
        # undefined names and raised NameError on construction.
        super().__init__()
        self.sessions = LRUCache(1000)  # session_id -> accumulated session data
        self.session = ClientSession(host, port, ident, secret, ssl)
        self.exit_stack = AsyncExitStack()

    async def __aenter__(self):
        await self.exit_stack.enter_async_context(self.session)
        # Bug fix: this class has no base defining __aenter__, so the old
        # `return super().__aenter__()` always raised AttributeError.
        return self

    async def __aexit__(self, exc_type, exc, tb):
        # Close the hpfeeds client session entered in __aenter__; without
        # this the class could not be used with `async with` at all.
        await self.exit_stack.aclose()

    def log(self, event):
        """Accumulate per-session events and publish when the session closes.

        NOTE(review): `self.publish` is not defined on this class — presumably
        provided by a subclass or mixin; confirm before standalone use.
        """
        session_id = event['session_id']
        if event['type'] == 'adbhoney.session.connect':
            self.sessions[session_id] = {
                'src_ip': event['src_ip'],
                'src_port': event['src_port'],
                'dst_ip': event['dst_ip'],
                'dst_port': event['dst_port'],
                'sensor': event['sensor'],
                'shasum': [],
            }
            return

        # If the connect entry was evicted from the LRU cache we get a fresh
        # (throwaway) dict; setdefault keeps the upload path from crashing.
        session = self.sessions.get(session_id, {})
        if event['type'] == 'adbhoney.session.file_upload':
            session.setdefault('shasum', []).append(event['shasum'])
        elif event['type'] == 'adbhoney.session.closed':
            session.update({
                'closedmessage': event['closedmessage'],
                'duration': event['duration'],
            })
            # NOTE(review): publishes the closing event, not the accumulated
            # `session` dict — confirm that is intended.
            try:
                self.publish('adbhoney', json.dumps(event))
            finally:
                self.sessions.pop(session_id, None)
class MLStorageClient(object):
    """
    Client binding for MLStorage Server API v1.
    """

    def __init__(self, uri: str):
        """
        Construct a new :class:`ClientV1`.

        Args:
            uri: Base URI of the MLStorage server, e.g., "http://example.com".
        """
        uri = uri.rstrip('/')
        self._uri = uri
        # Maps experiment id -> storage_dir, so `get_storage_dir` can often
        # avoid a round-trip to the server.
        self._storage_dir_cache = LRUCache(128)

    def _update_storage_dir_cache(self, doc):
        # Every experiment document that passes through the client refreshes
        # the storage-dir cache entry for its id.
        self._storage_dir_cache[doc['_id']] = doc['storage_dir']

    @property
    def uri(self) -> str:
        """Get the base URI of the MLStorage server."""
        return self._uri

    def do_request(self, method: str, endpoint: str, decode_json: bool = True,
                   **kwargs) -> Union[requests.Response, Any]:
        """
        Send request of HTTP `method` to given `endpoint`.

        Args:
            method: The HTTP request method.
            endpoint: The endpoint of the API, should start with a slash "/".
                For example, "/_query".
            decode_json: Whether or not to decode the response body as JSON?
            \\**kwargs: Arguments to be passed to :func:`requests.request`.

        Returns:
            The response object if ``decode_json = False``, or the decoded
            JSON object.
        """
        uri = f'{self.uri}/v1{endpoint}'
        if 'json' in kwargs:
            # Serialize with the project's own `json_dumps` (rather than
            # letting `requests` do it) and send as a raw body.
            json_obj = kwargs.pop('json')
            json_str = json_dumps(json_obj)
            kwargs['data'] = json_str
            kwargs.setdefault('headers', {})
            kwargs['headers']['Content-Type'] = 'application/json'
        resp = requests.request(method, uri, **kwargs)
        resp.raise_for_status()
        if decode_json:
            # Guard against non-JSON bodies (e.g. HTML error pages) being
            # decoded blindly; strip any ";charset=..." suffix first.
            content_type = resp.headers.get('content-type') or ''
            content_type = content_type.split(';', 1)[0]
            if content_type != 'application/json':
                raise IOError(f'The response from {uri} is not JSON: '
                              f'HTTP code is {resp.status_code}')
            resp = json_loads(resp.content)
        return resp

    def query(self,
              filter: Optional[FilterType] = None,
              sort: Optional[str] = None,
              skip: int = 0,
              limit: Optional[int] = None) -> List[DocumentType]:
        """
        Query experiment documents according to the `filter`.

        Args:
            filter: The filter dict.
            sort: Sort by which field, a string matching the pattern
                ``[+/-]<field>``.  "+" means ASC order, while "-" means DESC
                order.  For example, "start_time", "+start_time" and
                "-stop_time".
            skip: The number of records to skip.
            limit: The maximum number of records to retrieve.

        Returns:
            The documents of the matched experiments.
        """
        uri = f'/_query?skip={skip}'
        if sort is not None:
            uri += f'&sort={urlquote(sort)}'
        if limit is not None:
            uri += f'&limit={limit}'

        ret = self.do_request('POST', uri, json=filter or {})
        for doc in ret:
            self._update_storage_dir_cache(doc)
        return ret

    def get(self, id: IdType) -> DocumentType:
        """
        Get the document of an experiment by its `id`.

        Args:
            id: The id of the experiment.

        Returns:
            The document of the retrieved experiment.
        """
        ret = self.do_request('GET', f'/_get/{id}')
        self._update_storage_dir_cache(ret)
        return ret

    def heartbeat(self, id: IdType) -> None:
        """
        Send heartbeat packet for the experiment `id`.

        Args:
            id: The id of the experiment.
        """
        self.do_request('POST', f'/_heartbeat/{id}', data=b'')

    def create(self, doc_fields: DocumentType) -> DocumentType:
        """
        Create an experiment.

        Args:
            doc_fields: The document fields of the new experiment.

        Returns:
            The document of the created experiment.
        """
        # Copy so the caller's dict is never mutated by the request layer.
        doc_fields = dict(doc_fields)
        ret = self.do_request('POST', '/_create', json=doc_fields)
        self._update_storage_dir_cache(ret)
        return ret

    def update(self, id: IdType, doc_fields: DocumentType) -> DocumentType:
        """
        Update the document of an experiment.

        Args:
            id: ID of the experiment.
            doc_fields: The fields to be updated.

        Returns:
            The document of the updated experiment.
        """
        ret = self.do_request('POST', f'/_update/{id}', json=doc_fields)
        self._update_storage_dir_cache(ret)
        return ret

    def add_tags(self, id: IdType, tags: Iterable[str]) -> DocumentType:
        """
        Add tags to an experiment document.

        Args:
            id: ID of the experiment.
            tags: New tags to be added.

        Returns:
            The document of the updated experiment.
        """
        # Read-modify-write: fetch current tags, append the new ones while
        # preserving order and uniqueness, then push the merged list back.
        old_doc = self.get(id)
        new_tags = old_doc.get('tags', [])
        for tag in tags:
            if tag not in new_tags:
                new_tags.append(tag)
        return self.update(id, {'tags': new_tags})

    def delete(self, id: IdType) -> List[IdType]:
        """
        Delete an experiment.

        Args:
            id: ID of the experiment.

        Returns:
            List of deleted experiment IDs.
        """
        ret = self.do_request('POST', f'/_delete/{id}', data=b'')
        # Evict every deleted experiment from the storage-dir cache.
        for i in ret:
            self._storage_dir_cache.pop(i, None)
        return ret

    def set_finished(
            self, id: IdType, status: str,
            doc_fields: Optional[DocumentType] = None) -> DocumentType:
        """
        Set the status of an experiment.

        Args:
            id: ID of the experiment.
            status: The new status, one of {"RUNNING", "COMPLETED", "FAILED"}.
            doc_fields: Optional new document fields to be set.

        Returns:
            The document of the updated experiment.
        """
        doc_fields = dict(doc_fields or ())
        doc_fields['status'] = status
        ret = self.do_request('POST', f'/_set_finished/{id}', json=doc_fields)
        self._update_storage_dir_cache(ret)
        return ret

    def get_storage_dir(self, id: IdType) -> str:
        """
        Get the storage directory of an experiment.

        Args:
            id: ID of the experiment.

        Returns:
            The storage directory of the experiment.
        """
        id = str(id)
        storage_dir = self._storage_dir_cache.get(id, None)
        if storage_dir is None:
            # Cache miss: `self.get` refreshes the cache as a side effect.
            doc = self.get(id)
            storage_dir = doc['storage_dir']
        return storage_dir

    def get_file(self, id: IdType, path: str) -> bytes:
        """
        Get the content of a file in the storage directory of an experiment.

        Args:
            id: ID of the experiment.
            path: Relative path of the file.

        Returns:
            The file content.
        """
        id = str(id)
        path = normalize_relpath(path)
        return self.do_request('GET', f'/_getfile/{id}/{path}',
                               decode_json=False).content
class PandoraLibraryProvider(backend.LibraryProvider):
    """Mopidy library provider exposing Pandora stations, genres and tracks."""

    ROOT_DIR_NAME = 'Pandora'
    GENRE_DIR_NAME = 'Browse Genres'

    root_directory = models.Ref.directory(name=ROOT_DIR_NAME,
                                          uri=PandoraUri('directory').uri)
    genre_directory = models.Ref.directory(name=GENRE_DIR_NAME,
                                           uri=PandoraUri('genres').uri)

    def __init__(self, backend, sort_order):
        super(PandoraLibraryProvider, self).__init__(backend)
        self.sort_order = sort_order.lower()

        # Station playlists are expensive to fetch; keep only a handful live.
        # `missing` lazily builds a cache entry on first lookup.
        self.pandora_station_cache = LRUCache(maxsize=5,
                                              missing=self.get_station_cache_item)
        self.pandora_track_cache = LRUCache(maxsize=10)

    def browse(self, uri):
        self.backend.playback.reset_skip_limits()
        if uri == self.root_directory.uri:
            return self._browse_stations()

        if uri == self.genre_directory.uri:
            return self._browse_genre_categories()

        pandora_uri = PandoraUri.factory(uri)

        if isinstance(pandora_uri, GenreUri):
            return self._browse_genre_stations(uri)

        if isinstance(pandora_uri, StationUri):
            return self._browse_tracks(uri)

    def lookup(self, uri):
        pandora_uri = PandoraUri.factory(uri)
        if isinstance(pandora_uri, SearchUri):
            # Create the station first so that it can be browsed.
            station_uri = self._create_station_for_token(pandora_uri.token)
            track = self._browse_tracks(station_uri.uri)[0]

            # Recursive call to look up first track in station that was
            # searched for.
            return self.lookup(track.uri)

        if isinstance(pandora_uri, TrackUri):
            try:
                track = self.lookup_pandora_track(uri)
            except KeyError:
                logger.exception(
                    "Failed to lookup Pandora URI '{}'.".format(uri))
                return []
            else:
                track_kwargs = {'uri': uri}
                (album_kwargs, artist_kwargs) = {}, {}

                # TODO: Album.images has been deprecated in Mopidy 1.2.
                #       Remove this code when all frontends have been updated
                #       to make use of the newer
                #       LibraryController.get_images()
                images = self.get_images([uri])[uri]
                if len(images) > 0:
                    album_kwargs = {'images': [image.uri for image in images]}

                if isinstance(pandora_uri, AdItemUri):
                    track_kwargs['name'] = 'Advertisement'

                    if not track.title:
                        track.title = '(Title not specified)'
                    artist_kwargs['name'] = track.title

                    if not track.company_name:
                        track.company_name = '(Company name not specified)'
                    album_kwargs['name'] = track.company_name
                else:
                    track_kwargs['name'] = track.song_name
                    track_kwargs['length'] = track.track_length * 1000
                    try:
                        track_kwargs['bitrate'] = int(track.bitrate)
                    except TypeError:
                        # Bitrate not specified for this stream, ignore.
                        pass
                    artist_kwargs['name'] = track.artist_name
                    album_kwargs['name'] = track.album_name
        else:
            raise ValueError(
                'Unexpected type to perform Pandora track lookup: {}.'.format(
                    pandora_uri.uri_type))

        artist_kwargs[
            'uri'] = uri  # Artist lookups should just point back to the track itself.
        track_kwargs['artists'] = [models.Artist(**artist_kwargs)]
        album_kwargs[
            'uri'] = uri  # Album lookups should just point back to the track itself.
        track_kwargs['album'] = models.Album(**album_kwargs)
        return [models.Track(**track_kwargs)]

    def get_images(self, uris):
        """Resolve album-art / ad image URIs for each of the given track URIs."""
        result = {}
        for uri in uris:
            image_uris = set()
            try:
                track = self.lookup_pandora_track(uri)
                if track.is_ad is True:
                    image_uri = track.image_url
                else:
                    image_uri = track.album_art_url
                if image_uri:
                    image_uris.update([image_uri])
            except (TypeError, KeyError):
                pandora_uri = PandoraUri.factory(uri)
                if isinstance(pandora_uri, TrackUri):
                    # Could not find the track as expected - exception.
                    logger.exception(
                        "Failed to lookup image for Pandora URI '{}'.".format(
                            uri))
                else:
                    # Lookup
                    logger.warning(
                        "No images available for Pandora URIs of type '{}'.".
                        format(pandora_uri.uri_type))
            result[uri] = [models.Image(uri=u) for u in image_uris]
        return result

    def _formatted_station_list(self, list):
        # Bug fix: initialize here so the marking loop below does not raise
        # NameError when the station list contains no QuickMix station.
        quickmix_stations = []

        # Find QuickMix stations and move QuickMix to top
        for i, station in enumerate(list[:]):
            if station.is_quickmix:
                quickmix_stations = station.quickmix_stations
                if not station.name.endswith(' (marked with *)'):
                    station.name += ' (marked with *)'
                list.insert(0, list.pop(i))
                break

        # Mark QuickMix stations
        for station in list:
            if station.id in quickmix_stations:
                if not station.name.endswith('*'):
                    station.name += '*'

        return list

    def _browse_stations(self):
        station_directories = []

        stations = self.backend.api.get_station_list()
        if stations:
            if self.sort_order == 'a-z':
                stations.sort(key=lambda x: x.name, reverse=False)

            for station in self._formatted_station_list(stations):
                # As of version 5 of the Pandora API, station IDs and tokens
                # are always equivalent. We're using this assumption as we
                # don't have the station token available for deleting the
                # station. Detect if any Pandora API changes ever breaks this
                # assumption in the future.
                assert station.token == station.id
                station_directories.append(
                    models.Ref.directory(name=station.name,
                                         uri=PandoraUri.factory(station).uri))

        station_directories.insert(0, self.genre_directory)
        return station_directories

    def _browse_tracks(self, uri):
        pandora_uri = PandoraUri.factory(uri)
        return [self.get_next_pandora_track(pandora_uri.station_id)]

    def _create_station_for_token(self, token):
        json_result = self.backend.api.create_station(search_token=token)
        new_station = Station.from_json(self.backend.api, json_result)

        # Invalidate caches so the new station shows up immediately.
        self.refresh()
        return PandoraUri.factory(new_station)

    def _browse_genre_categories(self):
        return [
            models.Ref.directory(name=category, uri=GenreUri(category).uri)
            for category in sorted(self.backend.api.get_genre_stations().keys())
        ]

    def _browse_genre_stations(self, uri):
        return [
            models.Ref.directory(name=station.name,
                                 uri=PandoraUri.factory(station).uri)
            for station in self.backend.api.get_genre_stations()[
                PandoraUri.factory(uri).category_name]
        ]

    def lookup_pandora_track(self, uri):
        # Raises KeyError if the track has fallen out of the cache.
        return self.pandora_track_cache[uri].track

    def get_station_cache_item(self, station_id):
        # Station tokens (S/R/C/G prefix) must be turned into a real station
        # before a playlist can be fetched.
        if re.match('^([SRCG])', station_id):
            pandora_uri = self._create_station_for_token(station_id)
            station_id = pandora_uri.station_id

        station = self.backend.api.get_station(station_id)
        station_iter = iterate_forever(station.get_playlist)
        return StationCacheItem(station, station_iter)

    def get_next_pandora_track(self, station_id):
        try:
            station_iter = self.pandora_station_cache[station_id].iter
            track = next(station_iter)
        except Exception:
            logger.exception('Error retrieving next Pandora track.')
            return None

        track_uri = PandoraUri.factory(track)
        if isinstance(track_uri, AdItemUri):
            track_name = 'Advertisement'
        else:
            track_name = track.song_name

        ref = models.Ref.track(name=track_name, uri=track_uri.uri)
        self.pandora_track_cache[track_uri.uri] = TrackCacheItem(ref, track)
        return ref

    def refresh(self, uri=None):
        if not uri or uri == self.root_directory.uri:
            self.backend.api.get_station_list(force_refresh=True)
        elif uri == self.genre_directory.uri:
            self.backend.api.get_genre_stations(force_refresh=True)
        else:
            pandora_uri = PandoraUri.factory(uri)
            if isinstance(pandora_uri, StationUri):
                try:
                    self.pandora_station_cache.pop(pandora_uri.station_id)
                except KeyError:
                    # Item not in cache, ignore
                    pass
            else:
                raise ValueError(
                    'Unexpected URI type to perform refresh of Pandora directory: {}.'
                    .format(pandora_uri.uri_type))

    def search(self, query=None, uris=None, exact=False, **kwargs):
        search_text = self._formatted_search_query(query)

        if not search_text:
            # No value provided for search query, abort.
            logger.info('Unsupported Pandora search query: {}'.format(query))
            return []

        search_result = self.backend.api.search(search_text,
                                                include_near_matches=False,
                                                include_genre_stations=True)

        tracks = []
        for genre in search_result.genre_stations:
            tracks.append(
                models.Track(uri=SearchUri(genre.token).uri,
                             name='{} (Pandora genre)'.format(
                                 genre.station_name),
                             artists=[models.Artist(name=genre.station_name)]))

        for song in search_result.songs:
            tracks.append(
                models.Track(uri=SearchUri(song.token).uri,
                             name='{} (Pandora station)'.format(
                                 song.song_name),
                             artists=[models.Artist(name=song.artist)]))

        artists = []
        for artist in search_result.artists:
            search_uri = SearchUri(artist.token)
            if search_uri.is_artist_search:
                station_name = '{} (Pandora artist)'.format(artist.artist)
            else:
                station_name = '{} (Pandora composer)'.format(artist.artist)
            artists.append(models.Artist(uri=search_uri.uri,
                                         name=station_name))

        return models.SearchResult(uri='pandora:search:{}'.format(search_text),
                                   tracks=tracks, artists=artists)

    def _formatted_search_query(self, query):
        # Flatten the Mopidy query dict into a single free-text search string;
        # only 'any', 'artist' and 'track_name' fields are supported.
        search_text = []
        for (field, values) in iter(query.items()):
            if not hasattr(values, '__iter__'):
                values = [values]
            for value in values:
                if field == 'any' or field == 'artist' or field == 'track_name':
                    search_text.append(value)
        search_text = ' '.join(search_text)
        return search_text
class TelegramBot(object):
    """Twisted-based Telegram Bot API client with a long-polling update loop."""

    def __init__(self, token, name, skip_offset=False, allowed_updates=None,
                 agent=None, timeout=None):
        self.id = int(token.split(':')[0])  # the bot id is the part before ':'
        self.name = name
        self.token = token
        self.agent = agent
        # getUpdates is called with offset = last_update_id + 1; starting at
        # -2 yields offset -1, which asks Telegram for only the most recent
        # update (i.e. skips the pending backlog).
        self.last_update_id = -2 if skip_offset else -1
        self.update_prehandlers = []
        self.message_handlers = []
        self.message_subscribers = LRUCache(maxsize=10000)
        self.message_prehandlers = []
        self.message_next_handlers = LRUCache(maxsize=1000)
        self.retry_update = 0
        self.allowed_updates = allowed_updates
        self.running = False
        self.inline_query_handler = None
        self.callback_query_handler = None
        self.chosen_inline_result_handler = None
        self.channel_post_handler = None
        self.on_updated_listener = None
        self.on_api_request_listener = None
        self.botan = None
        self.timeout = timeout
        self._noisy = False

    def method_url(self, method):
        """Build the full API URL for a bot method."""
        return API_URL + 'bot' + self.token + '/' + method

    def start_update(self, default_delay=0, **kwargs):
        """Start the long-polling loop; reschedules itself until stop_update()."""
        self.running = True

        @inlineCallbacks
        def update_bot():
            if not self.running:
                return
            try:
                yield self.get_update(**kwargs)
                self.retry_update = default_delay
            except Exception:
                # Narrowed from a bare `except:` so cancellation/GeneratorExit
                # are not swallowed; failures back off by 3s up to 20s.
                log.failure("Couldn't get updates. Delaying for {delay} seconds",
                            delay=self.retry_update)
                self.retry_update = min(self.retry_update + 3, 20)
            reactor.callLater(self.retry_update, update_bot)

        reactor.callWhenRunning(update_bot)

    def stop_update(self):
        self.running = False

    @inlineCallbacks
    def get_update(self, telegram_timeout=10, timeout=None, limit=100):
        """Fetch one batch of updates via long polling and dispatch them."""
        payload = {'timeout': telegram_timeout,
                   'offset': self.last_update_id + 1,
                   'limit': limit}
        if self.allowed_updates:
            payload['allowed_updates'] = self.allowed_updates
        updates = yield self._request('getUpdates', params=payload,
                                      timeout=timeout)
        if self.on_updated_listener:
            self.on_updated_listener(updates)
        # Bug fix: start from the current offset instead of -1, so an empty
        # batch does not reset last_update_id and replay old updates.
        max_update_id = self.last_update_id
        inline_queries = []
        chosen_inline_results = []
        callback_queries = []
        channel_posts = []
        messages = []
        for update in updates:
            if self._noisy:
                log.debug("New update. ID: {update_id}",
                          update_id=update['update_id'])
            self._notify_update_prehandlers(update)

            if 'inline_query' in update:
                inline_queries.append(
                    InlineQuery.de_json(update['inline_query']))
            elif 'chosen_inline_result' in update:
                chosen_inline_results.append(
                    ChosenInlineResult.de_json(update['chosen_inline_result']))
            elif 'callback_query' in update:
                callback_queries.append(
                    CallbackQuery.de_json(update['callback_query']))
            elif 'channel_post' in update:
                channel_posts.append(
                    ChannelPost(Message.de_json(update['channel_post'])))
            elif 'message' in update:
                msg = Message.de_json(update['message'])
                msg.bot_name = self.name  # FIXME: a hack
                messages.append(msg)
            else:
                log.debug("Unsupported update type: {update}",
                          update=json.dumps(update, skipkeys=True,
                                            ensure_ascii=False,
                                            default=lambda o: o.__dict__))

            if update['update_id'] > max_update_id:
                max_update_id = update['update_id']

        yield self.process_updates(inline_queries, chosen_inline_results,
                                   callback_queries, channel_posts, messages)
        self.last_update_id = max_update_id

    def process_updates(self, inline_queries, chosen_inline_results,
                        callback_queries, channel_posts, messages):
        """Dispatch each update category to its handler; all run in parallel."""
        return DeferredList(
            [
                self.process_updates_parallel_with_handler(
                    self.inline_query_handler, inline_queries, self),
                self.process_updates_parallel_with_handler(
                    self.chosen_inline_result_handler, chosen_inline_results,
                    self),
                # TODO: maybe callback_queries and channel_posts need to be
                # processed one by one (is order important?)
                self.process_updates_parallel_with_handler(
                    self.callback_query_handler, callback_queries, self),
                self.process_updates_parallel_with_handler(
                    self.channel_post_handler, channel_posts, self),
                self.process_messages(messages)
            ]
        )

    @staticmethod
    def process_updates_parallel_with_handler(handler, updates, *args, **kwargs):
        """Run `handler` over all `updates` in parallel; fires immediately if
        there is nothing to do."""
        if handler is not None and updates:
            return DeferredList(
                [_map_function_to_deferred(handler, update, *args, **kwargs)
                 for update in updates])
        else:
            d = Deferred()
            d.callback(None)
            return d

    def process_message(self, message):
        """Route one message: reply subscriber, then per-chat "next" handler,
        then command handlers — first match wins."""
        # synchronously notify prehandlers
        self._notify_message_prehandlers(message)

        message_subscriber_handler_function = \
            self._find_message_subscriber_handler_function(message)
        if message_subscriber_handler_function is not None:
            return _map_function_to_deferred(
                message_subscriber_handler_function, message, self)

        message_next_handler = self._find_message_next_handler(message)
        if message_next_handler is not None:
            return _map_function_to_deferred(message_next_handler, message,
                                             self)

        command_handler_function = self._find_command_handler_function(message)
        if command_handler_function is not None:
            return _map_function_to_deferred(command_handler_function, message,
                                             self)

    @inlineCallbacks
    def process_messages_in_order(self, messages):
        for message in messages:
            yield self.process_message(message)

    def process_messages(self, messages):
        """Process chats in parallel, but each chat's messages sequentially."""
        if messages:
            # Bug fix: groupby() groups share one underlying iterator and are
            # invalidated as soon as the outer iteration advances; materialize
            # each group with list() before handing it to async processing.
            return DeferredList(
                [self.process_messages_in_order(list(messages_group[1]))
                 for messages_group in groupby(
                     sorted(messages, key=lambda m: m.chat.id),
                     key=lambda m: m.chat.id)])
        else:
            d = Deferred()
            d.callback(None)
            return d

    def process_inline_query(self, inline_query):
        if self.inline_query_handler:
            self.inline_query_handler(inline_query, self)

    def process_chosen_inline_query(self, chosen_inline_result):
        if self.chosen_inline_result_handler:
            self.chosen_inline_result_handler(chosen_inline_result, self)

    def _notify_update_prehandlers(self, update):
        for handler in self.update_prehandlers:
            handler(update, self)

    def _notify_message_prehandlers(self, message):
        for handler in self.message_prehandlers:
            handler(message, self)

    def _find_command_handler_function(self, message):
        for message_handler in self.message_handlers:
            if self._test_message_handler(message_handler, message):
                return message_handler['function']
        return None

    def _find_message_subscriber_handler_function(self, message):
        if not hasattr(message, 'reply_to_message'):
            return None
        # pop: a reply subscriber fires at most once.
        return self.message_subscribers.pop(
            message.reply_to_message.message_id, None)

    def _find_message_next_handler(self, message):
        # pop: a "next message in this chat" handler fires at most once.
        return self.message_next_handlers.pop(message.chat.id, None)

    def register_message_handler(self, fn, commands=None, regexp=None,
                                 func=None, content_types=None):
        """Register `fn` for messages matching the given filters.
        commands/regexp only apply to 'text' content."""
        if not content_types:
            content_types = ['text']
        func_dict = {'function': fn, 'content_types': content_types}
        if regexp:
            func_dict['regexp'] = regexp if 'text' in content_types else None
        if func:
            func_dict['lambda'] = func
        if commands:
            func_dict['commands'] = commands if 'text' in content_types else None
        self.message_handlers.append(func_dict)

    def message_handler(self, commands=None, regexp=None, func=None,
                        content_types=None):
        """Decorator form of register_message_handler."""
        def decorator(fn):
            self.register_message_handler(fn, commands, regexp, func,
                                          content_types)
            return fn
        return decorator

    @staticmethod
    def _test_message_handler(message_handler, message):
        """Return True if `message` satisfies the handler's registered filters."""
        if message.content_type not in message_handler['content_types']:
            return False

        if 'commands' in message_handler and message.content_type == 'text':
            cmd = extract_command(message.text)
            if cmd:
                for command_pattern in message_handler['commands']:
                    # Anchor the pattern so "/start" does not match "/startx".
                    if not command_pattern.endswith('$'):
                        command_pattern += '$'
                    if re.match(command_pattern, cmd):
                        return True
            return False

        if 'regexp' in message_handler \
                and message.content_type == 'text' \
                and re.search(message_handler['regexp'], message.text):
            return True

        if 'lambda' in message_handler:
            return message_handler['lambda'](message)

        return False

    @inlineCallbacks
    def send_message(self, chat_id, text, disable_web_page_preview=None,
                     reply_to_message_id=None, reply_markup=None,
                     parse_mode=None):
        method = r'sendMessage'

        payload = {'chat_id': str(chat_id), 'text': text}
        if disable_web_page_preview:
            payload['disable_web_page_preview'] = disable_web_page_preview
        if reply_to_message_id:
            payload['reply_to_message_id'] = reply_to_message_id
        if reply_markup:
            payload['reply_markup'] = _convert_markup(reply_markup)
        if parse_mode:
            payload['parse_mode'] = parse_mode
        request = yield self._request(method, 'POST', params=payload)
        returnValue(Message.de_json(request))

    def answer_to_inline_query(self, query_id, results, personal=False,
                               next_offset='', switch_pm_text=None,
                               switch_pm_parameter=None):
        def _map_result(result):
            if isinstance(result, telegram.InlineQueryResult):
                return result.to_dict()
            else:
                return result

        payload = {
            'inline_query_id': str(query_id),
            'results': json.dumps([_map_result(res) for res in results]),
            'is_personal': personal,
            'next_offset': next_offset
        }
        if switch_pm_text:
            payload['switch_pm_text'] = switch_pm_text
        if switch_pm_parameter:
            payload['switch_pm_parameter'] = switch_pm_parameter
        return self._request('answerInlineQuery', 'POST', params=payload)

    @inlineCallbacks
    def edit_message_text(self, chat_id, message_id, text, parse_mode=None,
                          disable_web_page_preview=None, reply_markup=None):
        method = r'editMessageText'
        payload = {'chat_id': str(chat_id), 'message_id': str(message_id),
                   'text': text}
        if disable_web_page_preview:
            payload['disable_web_page_preview'] = disable_web_page_preview
        if reply_markup:
            if isinstance(reply_markup, JsonSerializable):
                payload['reply_markup'] = reply_markup.to_json()
            elif isinstance(reply_markup, dict):
                payload['reply_markup'] = json.dumps(reply_markup)
        if parse_mode:
            payload['parse_mode'] = parse_mode
        request = yield self._request(method, 'POST', params=payload)
        returnValue(Message.de_json(request))

    def set_webhook(self, url, certificate, max_connections=None):
        method = r'setWebhook'
        payload = {'url': url}
        files = None
        # A non-string certificate is treated as file content to upload.
        if not is_string(certificate):
            files = {'certificate': ('cert', certificate)}
        else:
            payload['certificate'] = certificate
        if max_connections:
            payload['max_connections'] = max_connections
        if self.allowed_updates:
            payload['allowed_updates'] = self.allowed_updates
        return self._make_request(method, 'POST', params=payload, files=files)

    def delete_webhook(self):
        method = r'deleteWebhook'
        return self._make_request(method, 'POST')

    @inlineCallbacks
    def delete_message(self, chat_id, message_id):
        method = r'deleteMessage'
        payload = {'chat_id': str(chat_id), 'message_id': str(message_id)}
        request = yield self._request(method, 'POST', params=payload)
        returnValue(request)

    @inlineCallbacks
    def answer_callback_query(self, callback_query_id, text=None,
                              show_alert=None):
        method = r'answerCallbackQuery'
        payload = {'callback_query_id': str(callback_query_id)}
        if text:
            payload['text'] = text
        if show_alert:
            payload['show_alert'] = show_alert
        request = yield self._request(method, 'POST', params=payload)
        returnValue(request)

    @inlineCallbacks
    def get_file(self, file_id):
        method = r'getFile'
        payload = {'file_id': str(file_id)}
        request = yield self._request(method, 'POST', params=payload)
        returnValue(File.de_json(request))

    def get_file_url(self, file):
        return "https://api.telegram.org/file/bot%s/%s" % (self.token,
                                                           file.path)

    @inlineCallbacks
    def send_audio(self, chat_id, audio, filename='audio', duration=None,
                   performer=None, title=None, caption=None,
                   reply_to_message_id=None, reply_markup=None, timeout=None):
        method = r'sendAudio'
        payload = {'chat_id': chat_id}
        files = None
        # A non-string `audio` is file content to upload; a string is a
        # file_id or URL passed straight through.
        if not is_string(audio):
            files = {'audio': (filename, audio)}
        else:
            payload['audio'] = audio
        if duration:
            payload['duration'] = duration
        if performer:
            payload['performer'] = performer
        if title:
            payload['title'] = title
        if caption:
            payload['caption'] = caption
        if reply_to_message_id:
            payload['reply_to_message_id'] = reply_to_message_id
        if reply_markup:
            payload['reply_markup'] = _convert_markup(reply_markup)
        request = yield self._request(method, 'POST', params=payload,
                                      files=files, timeout=timeout)
        returnValue(Message.de_json(request))

    def reply_to(self, message, text, **kwargs):
        return self.send_message(message.chat.id, text,
                                 reply_to_message_id=message.message_id,
                                 **kwargs)

    def send_chat_action(self, chat_id, action):
        method = r'sendChatAction'
        payload = {'chat_id': chat_id, 'action': action}
        return self._make_request(method, 'POST', params=payload)

    def register_for_reply(self, message, callback):
        """Invoke `callback` once when someone replies to `message`."""
        self.message_subscribers[message.message_id] = callback

    def register_next_chat_handler(self, chat_id, callback):
        """Invoke `callback` once for the next message in chat `chat_id`."""
        self.message_next_handlers[chat_id] = callback

    @inlineCallbacks
    def _request(self, method_name, method='get', params=None, data=None,
                 files=None, timeout=None, **kwargs):
        """Call the API and unwrap the 'result' field of the response."""
        if self.on_api_request_listener:
            self.on_api_request_listener(method_name)
        result_json = yield self._make_request(method_name, method,
                                               params=params, data=data,
                                               files=files, timeout=timeout,
                                               **kwargs)
        returnValue(result_json['result'])

    @inlineCallbacks
    def _make_request(self, method_name, method='get', params=None, data=None,
                      files=None, timeout=None, **kwargs):
        """Perform the raw HTTP request via treq and validate the response."""
        request_url = API_URL + 'bot' + self.token + '/' + method_name
        params = _convert_utf8(params)
        if timeout is None:
            timeout = self.timeout
        resp = yield treq.request(method, request_url, params=params,
                                  data=data, files=files, timeout=timeout,
                                  agent=self.agent, **kwargs)
        result_json = yield _check_response(resp, method_name)
        returnValue(result_json)
class TelegramBot:
    """Twisted-based Telegram Bot API client.

    Long-polls ``getUpdates`` on the reactor and dispatches each update to
    the matching registered handler (messages, inline queries, callback
    queries, chosen inline results).
    """

    def __init__(self, token, name):
        self.name = name
        self.token = token
        self.agent = Agent(reactor)
        # getUpdates offset bookkeeping: next poll asks for last_update_id + 1.
        self.last_update_id = -1
        self.message_handlers = []
        # Reply subscriptions keyed by message_id; LRU-bounded so abandoned
        # registrations cannot grow without limit.
        self.message_subscribers = LRUCache(maxsize=10000)
        self.message_prehandlers = []
        # "Next message in this chat" callbacks, keyed by chat_id.
        self.message_next_handlers = LRUCache(maxsize=1000)
        # Current retry delay (seconds) after a failed poll; 0 means healthy.
        self.retry_update = 0
        self.running = False
        self.inline_query_handler = None
        self.callback_query_handler = None
        self.chosen_inline_result_handler = None
        self.botan = None

    def method_url(self, method):
        """Return the full Bot API URL for `method`."""
        return API_URL + 'bot' + self.token + '/' + method

    def start_update(self):
        """Start the long-polling loop on the reactor."""
        self.running = True

        @inlineCallbacks
        def update_bot():
            if not self.running:
                return
            try:
                yield self.get_update()
                self.retry_update = 0
                reactor.callWhenRunning(update_bot)
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt and could mask reactor shutdown. Narrowed to
            # Exception; log.failure records the active failure with traceback.
            except Exception:
                log.failure("Couldn't get updates. Delaying for %d seconds" % self.retry_update)
                reactor.callLater(self.retry_update, update_bot)
                # Linear backoff, capped at 20 seconds.
                self.retry_update = min(self.retry_update + 3, 20)

        reactor.callWhenRunning(update_bot)

    def stop_update(self):
        """Stop the polling loop after the in-flight request finishes."""
        self.running = False

    @inlineCallbacks
    def get_update(self):
        """Fetch one batch of updates and dispatch them by update type."""
        payload = {'timeout': 20, 'offset': self.last_update_id + 1}
        updates = yield _request(self.token, 'getUpdates', params=payload, timeout=25)
        new_messages_ids = set()
        new_messages = []
        for update in updates:
            log.debug("New update. ID: {update_id}", update_id=update['update_id'])
            if update['update_id'] > self.last_update_id:
                self.last_update_id = update['update_id']
            if 'inline_query' in update:
                inline_query = InlineQuery.de_json(update['inline_query'])
                self.process_inline_query(inline_query)
            elif 'chosen_inline_result' in update:
                chosen_inline_result = ChosenInlineResult.de_json(update['chosen_inline_result'])
                self.process_chosen_inline_query(chosen_inline_result)
            elif 'message' in update:
                msg = Message.de_json(update['message'])
                msg.bot_name = self.name
                # Keep at most one message per sender within a single batch.
                if msg.from_user.id not in new_messages_ids:
                    new_messages.append(msg)
                    new_messages_ids.add(msg.from_user.id)
            elif 'callback_query' in update:
                callback_query = CallbackQuery.de_json(update['callback_query'])
                self.process_callback_query(callback_query)
            else:
                log.debug("Unknown update type: {update}",
                          update=json.dumps(update, skipkeys=True, ensure_ascii=False,
                                            default=lambda o: o.__dict__))
        if new_messages:
            self.process_new_messages(new_messages)

    def process_callback_query(self, callback_query):
        """Forward a callback query to the registered handler, if any."""
        if self.callback_query_handler:
            self.callback_query_handler(callback_query, self)

    def process_new_messages(self, new_messages):
        """Run prehandlers, then next-chat handlers, then commands/subscribers."""
        self._notify_message_prehandlers(new_messages)
        not_processed = []
        for message in new_messages:
            # A "next handler" consumes the message; only unconsumed messages
            # reach the command handlers and reply subscribers below.
            if not self._notify_message_next_handler(message):
                not_processed.append(message)
        new_messages = not_processed
        self._notify_command_handlers(new_messages)
        self._notify_message_subscribers(new_messages)

    def process_inline_query(self, inline_query):
        """Forward an inline query to the registered handler, if any."""
        if self.inline_query_handler:
            self.inline_query_handler(inline_query, self)

    def process_chosen_inline_query(self, chosen_inline_result):
        """Forward a chosen-inline-result update to its handler, if any."""
        if self.chosen_inline_result_handler:
            self.chosen_inline_result_handler(chosen_inline_result, self)

    def _notify_message_prehandlers(self, new_messages):
        # Prehandlers see every message, regardless of later dispatch.
        for message in new_messages:
            for handler in self.message_prehandlers:
                handler(message, self)

    def _notify_command_handlers(self, new_messages):
        # First matching handler wins; at most one handler per message.
        for message in new_messages:
            for message_handler in self.message_handlers:
                if self._test_message_handler(message_handler, message):
                    message_handler['function'](message, self)
                    break

    def _notify_message_subscribers(self, new_messages):
        # Fire-and-forget: a reply subscription is consumed on first use.
        for message in new_messages:
            if not hasattr(message, 'reply_to_message'):
                continue
            handler = self.message_subscribers.pop(message.reply_to_message.message_id, None)
            if handler is not None:
                handler(message, self)

    def _notify_message_next_handler(self, message):
        """Invoke and consume the per-chat "next message" handler.

        Returns True if a handler consumed the message, False otherwise.
        """
        handler = self.message_next_handlers.pop(message.chat.id, None)
        if handler is not None:
            handler(message, self)
            return True
        return False

    def register_message_handler(self, fn, commands=None, regexp=None, func=None, content_types=None):
        """Register `fn` for messages matching the given filters.

        `commands`/`regexp` only apply to text messages; `func` is an
        arbitrary predicate; `content_types` defaults to ['text'].
        """
        if not content_types:
            content_types = ['text']
        func_dict = {'function': fn, 'content_types': content_types}
        if regexp:
            # A None value is stored when 'text' is not handled; the branch in
            # _test_message_handler is then unreachable, so this stays safe.
            func_dict['regexp'] = regexp if 'text' in content_types else None
        if func:
            func_dict['lambda'] = func
        if commands:
            func_dict['commands'] = commands if 'text' in content_types else None
        self.message_handlers.append(func_dict)

    def message_handler(self, commands=None, regexp=None, func=None, content_types=None):
        """Decorator form of :meth:`register_message_handler`."""
        def decorator(fn):
            self.register_message_handler(fn, commands, regexp, func, content_types)
            return fn
        return decorator

    @staticmethod
    def _test_message_handler(message_handler, message):
        """Return True if `message` satisfies `message_handler`'s filters."""
        if message.content_type not in message_handler['content_types']:
            return False
        if 'commands' in message_handler and message.content_type == 'text':
            cmd = extract_command(message.text)
            if cmd:
                for command_pattern in message_handler['commands']:
                    # Anchor the pattern so '/start' does not match '/startx'.
                    if not command_pattern.endswith('$'):
                        command_pattern += '$'
                    if re.match(command_pattern, cmd):
                        return True
                # A command that matches no pattern is rejected outright.
                return False
        if 'regexp' in message_handler \
                and message.content_type == 'text' \
                and re.search(message_handler['regexp'], message.text):
            return True
        if 'lambda' in message_handler:
            return message_handler['lambda'](message)
        return False

    @inlineCallbacks
    def send_message(self, chat_id, text, disable_web_page_preview=None,
                     reply_to_message_id=None, reply_markup=None, parse_mode=None):
        """Send a text message; returns the sent :class:`Message`."""
        method = r'sendMessage'
        payload = {'chat_id': str(chat_id), 'text': text}
        if disable_web_page_preview:
            payload['disable_web_page_preview'] = disable_web_page_preview
        if reply_to_message_id:
            payload['reply_to_message_id'] = reply_to_message_id
        if reply_markup:
            if isinstance(reply_markup, JsonSerializable):
                payload['reply_markup'] = reply_markup.to_json()
            elif isinstance(reply_markup, dict):
                payload['reply_markup'] = json.dumps(reply_markup)
        if parse_mode:
            payload['parse_mode'] = parse_mode
        request = yield _request(self.token, method, 'POST', params=payload)
        returnValue(Message.de_json(request))

    @inlineCallbacks
    def answer_to_inline_query(self, query_id, results, personal=False):
        """Answer an inline query with pre-serialized `results`."""
        request = yield _request(self.token, 'answerInlineQuery', 'POST', params={
            'inline_query_id': str(query_id),
            'results': json.dumps(results, ensure_ascii=False),
            'is_personal': personal
        })
        returnValue(request)

    @inlineCallbacks
    def edit_message_text(self, chat_id, message_id, text, parse_mode=None,
                          disable_web_page_preview=None, reply_markup=None):
        """Edit an existing message's text; returns the edited Message."""
        method = r'editMessageText'
        payload = {'chat_id': str(chat_id), 'message_id': str(message_id), 'text': text}
        if disable_web_page_preview:
            payload['disable_web_page_preview'] = disable_web_page_preview
        if reply_markup:
            if isinstance(reply_markup, JsonSerializable):
                payload['reply_markup'] = reply_markup.to_json()
            elif isinstance(reply_markup, dict):
                payload['reply_markup'] = json.dumps(reply_markup)
        if parse_mode:
            payload['parse_mode'] = parse_mode
        request = yield _request(self.token, method, 'POST', params=payload)
        returnValue(Message.de_json(request))

    @inlineCallbacks
    def answer_callback_query(self, callback_query_id, text=None, show_alert=None):
        """Acknowledge a callback query, optionally showing text/alert."""
        method = r'answerCallbackQuery'
        payload = {'callback_query_id': str(callback_query_id)}
        if text:
            payload['text'] = text
        if show_alert:
            payload['show_alert'] = show_alert
        request = yield _request(self.token, method, 'POST', params=payload)
        returnValue(request)

    @inlineCallbacks
    def get_file(self, file_id):
        """Resolve `file_id` to a :class:`File` descriptor."""
        method = r'getFile'
        payload = {'file_id': str(file_id)}
        request = yield _request(self.token, method, 'POST', params=payload)
        returnValue(File.de_json(request))

    def get_file_url(self, file):
        """Return the direct download URL for a resolved File."""
        return "https://api.telegram.org/file/bot%s/%s" % (self.token, file.path)

    @inlineCallbacks
    def send_audio(self, chat_id, audio, filename='audio', duration=None,
                   performer=None, title=None, reply_to_message_id=None,
                   reply_markup=None, timeout=30):
        """Send an audio file (file-like object) or file_id string."""
        method = r'sendAudio'
        payload = {'chat_id': chat_id}
        files = None
        if not is_string(audio):
            # File-like object: upload as multipart; strings are file_ids/URLs.
            files = {'audio': (filename, audio)}
        else:
            payload['audio'] = audio
        if duration:
            payload['duration'] = duration
        if performer:
            payload['performer'] = performer
        if title:
            payload['title'] = title
        if reply_to_message_id:
            payload['reply_to_message_id'] = reply_to_message_id
        # NOTE(review): reply_markup is accepted but deliberately not sent —
        # preserved as-is from the original (disabled _convert_markup call).
        # if reply_markup:
        #     payload['reply_markup'] = _convert_markup(reply_markup)
        request = yield _request(self.token, method, 'POST', params=payload,
                                 files=files, timeout=timeout)
        returnValue(Message.de_json(request))

    def reply_to(self, message, text, **kwargs):
        """Convenience wrapper: send `text` as a reply to `message`."""
        return self.send_message(message.chat.id, text,
                                 reply_to_message_id=message.message_id, **kwargs)

    def send_chat_action(self, chat_id, action):
        """Broadcast a chat action (e.g. 'typing') to `chat_id`."""
        method = r'sendChatAction'
        payload = {'chat_id': chat_id, 'action': action}
        return _make_request(self.token, method, 'POST', params=payload)

    def register_for_reply(self, message, callback):
        """Call `callback` when someone replies to `message`."""
        self.message_subscribers[message.message_id] = callback

    def register_next_chat_handler(self, chat_id, callback):
        """Call `callback` with the next message arriving in `chat_id`."""
        self.message_next_handlers[chat_id] = callback
class ApiClientV1(object):
    """
    Thin client binding for API v1.
    """

    def __init__(self, base_uri):
        """
        Construct a new :class:`ClientV1`.

        Args:
            base_uri (str): Base URI of the MLStorage server, e.g.,
                "http://example.com".
        """
        self._base_uri = base_uri.rstrip('/')
        # Small bounded cache mapping experiment id -> storage_dir, refreshed
        # on every successful server response that carries a document.
        self._storage_dir_cache = LRUCache(128)

    def _update_storage_dir_cache(self, doc):
        # Remember the storage_dir of `doc` for later get_storage_dir() calls.
        self._storage_dir_cache[doc['id']] = doc['storage_dir']

    @property
    def base_uri(self):
        """Get the base URI of the MLStorage server."""
        return self._base_uri

    def do_request(self, method, endpoint, **kwargs):
        """
        Do `method` request against given `endpoint`.

        Args:
            method (str): The HTTP request method.
            endpoint (str): The endpoint of the API, should start with a
                slash "/".  For example, "/_query".
            \\**kwargs: Arguments to be passed to :func:`requests.request`.

        Returns:
            The response object.

        Raises:
            RuntimeError: If the server does not answer with status 200.
        """
        response = requests.request(method, f'{self.base_uri}/v1{endpoint}', **kwargs)
        if response.status_code != 200:
            raise RuntimeError(f'HTTP error {response.status_code}: {response.text}')
        return response

    def query(self, filter=None, skip=0, limit=10):
        """Query experiment documents matching `filter`, with paging."""
        docs = self.do_request(
            'POST', f'/_query?skip={skip}&limit={limit}',
            json=filter or {}).json()
        for document in docs:
            self._update_storage_dir_cache(document)
        return docs

    def get(self, id):
        """Fetch a single experiment document by `id`."""
        id = validate_experiment_id(id)
        doc = self.do_request('GET', f'/_get/{id}').json()
        self._update_storage_dir_cache(doc)
        return doc

    def heartbeat(self, id):
        """Send a heartbeat for the experiment `id`."""
        id = validate_experiment_id(id)
        return self.do_request('POST', f'/_heartbeat/{id}', data=b'').json()

    def create(self, name, doc_fields=None):
        """Create a new experiment named `name` with optional `doc_fields`."""
        doc_fields = dict(doc_fields or ())
        doc_fields['name'] = name
        doc_fields = validate_experiment_doc(doc_fields)
        doc = self.do_request('POST', '/_create', json=doc_fields).json()
        self._update_storage_dir_cache(doc)
        return doc

    def update(self, id, doc_fields):
        """Update experiment `id` with the given `doc_fields`."""
        id = validate_experiment_id(id)
        doc_fields = validate_experiment_doc(dict(doc_fields))
        doc = self.do_request('POST', f'/_update/{id}', json=doc_fields).json()
        self._update_storage_dir_cache(doc)
        return doc

    def set_finished(self, id, status, doc_fields):
        """Mark experiment `id` finished with `status`, merging `doc_fields`."""
        id = validate_experiment_id(id)
        doc_fields = dict(doc_fields or ())
        doc_fields['status'] = status
        doc_fields = validate_experiment_doc(doc_fields)
        doc = self.do_request(
            'POST', f'/_set_finished/{id}', json=doc_fields).json()
        self._update_storage_dir_cache(doc)
        return doc

    def delete(self, id):
        """Delete experiment `id`; evicts the deleted ids from the cache."""
        id = validate_experiment_id(id)
        deleted = self.do_request('POST', f'/_delete/{id}', data=b'').json()
        for deleted_id in deleted:
            self._storage_dir_cache.pop(deleted_id, None)
        return deleted

    def get_storage_dir(self, id):
        """Return the storage directory of experiment `id`, using the cache."""
        id = str(validate_experiment_id(id))
        storage_dir = self._storage_dir_cache.get(id, None)
        if storage_dir is None:
            # Cache miss: fetch the full document (which also refreshes the
            # cache as a side effect of self.get()).
            storage_dir = self.get(id)['storage_dir']
        return storage_dir

    def getfile(self, id, path):
        """Download the raw bytes of `path` within experiment `id`."""
        id = str(validate_experiment_id(id))
        path = validate_relpath(path)
        return self.do_request('GET', f'/_getfile/{id}/{path}').content