Exemplo n.º 1
0
class VerifyHashCache:
    """Cache of already-verified token/password pairs.

    Skips repeated (expensive) hash checks for the exact same
    credentials. Best suited to small apps running on few processes,
    since the cache lives in process memory and is shared only between
    threads.
    """

    def __init__(self):
        cache_ttl = config_value("VERIFY_HASH_CACHE_TTL", default=(60 * 5))
        cache_size = config_value("VERIFY_HASH_CACHE_MAX_SIZE", default=500)

        try:
            from cachetools import TTLCache
        except ImportError:
            # this should have been checked at app init.
            raise
        self._cache = TTLCache(cache_size, cache_ttl)

    def has_verify_hash_cache(self, user):
        """Return the cached entry (truthy) when this user id was verified."""
        return self._cache.get(user.id)

    def set_cache(self, user):
        """Record a successful password check for this user."""
        self._cache[user.id] = True

    def clear(self):
        """Drop all cached verification results."""
        self._cache.clear()
Exemplo n.º 2
0
class InMemoryCacheHandler:
    """In-memory DNS cache backed by a TTLCache.

    Size and TTL are read from ``setup_config_data_holder`` at
    construction time.
    """

    def __init__(self):
        # Config values may arrive as strings; coerce to int for TTLCache.
        dns_cache_size = int(setup_config_data_holder.dns_cache_size)
        dns_cache_ttl = int(setup_config_data_holder.dns_cache_ttl)
        self.cache = TTLCache(maxsize=dns_cache_size, ttl=dns_cache_ttl)

    def add_to_cache(self, key, value):
        """Store value under key; returns the cache for chaining."""
        self.cache[key] = value
        return self.cache

    def get_from_cache(self, key):
        """Return the cached value, or None when absent/expired."""
        return self.cache.get(key)

    def check_if_present(self, required_key):
        """Return True when required_key is present (and not expired)."""
        # Membership test on the mapping itself; no .keys() scan or
        # explicit True/False branches needed.
        return required_key in self.cache

    def _log_state(self, action):
        """Log current cache statistics under the given action label."""
        logger.debug(f"[Process: {action}], "
                     f"CurrentCacheSize: [{self.cache.currsize}], "
                     f"[MaxCacheSize: {self.cache.maxsize}], "
                     f"[CacheTTL: {self.cache.ttl}], "
                     f"[CacheTimer: {self.cache.timer}]")

    def show_cache(self):
        """Log the cache statistics."""
        self._log_state("Show Cache")

    def clear_cache(self):
        """Log the cache statistics, then evict everything."""
        self._log_state("Clearing Cache")
        self.cache.clear()
Exemplo n.º 3
0
 def test_atomic(self):
     """Each read accessor sees the freshly stored value, atomically."""
     cache = TTLCache(maxsize=1, ttl=1, timer=Timer(auto=True))
     for read in (lambda: cache[1], lambda: cache.get(1),
                  lambda: cache.pop(1), lambda: cache.setdefault(1)):
         cache[1] = 1
         self.assertEqual(1, read())
     cache[1] = 1
     cache.clear()
     self.assertEqual(0, len(cache))
Exemplo n.º 4
0
 def test_ttl_atomic(self):
     """TTL variant: each read accessor sees the freshly stored value."""
     cache = TTLCache(maxsize=1, ttl=1, timer=Timer(auto=True))
     for read in (lambda: cache[1], lambda: cache.get(1),
                  lambda: cache.pop(1), lambda: cache.setdefault(1)):
         cache[1] = 1
         self.assertEqual(1, read())
     cache[1] = 1
     cache.clear()
     self.assertEqual(0, len(cache))
Exemplo n.º 5
0
class InternalCache:
    """TTL-bounded key/value store keyed by (url, file_path) pairs."""

    def __init__(self,
                 maxsize=float("inf"),
                 time_to_live=24 * 60 * 60) -> None:
        self._cache = TTLCache(maxsize=maxsize, ttl=time_to_live)

    @staticmethod
    def _key(url: str, file_path: str) -> int:
        """Fold (url, file_path) into a single hashable cache key."""
        return hash((url, file_path))

    def update(self, url: str, file_path: str, value: str) -> None:
        """Cache value for the given (url, file_path) pair."""
        self._cache[self._key(url, file_path)] = value

    def get(self, url: str, file_path: str) -> Union[str, None]:
        """Return the cached value, or None when absent/expired."""
        return self._cache.get(self._key(url, file_path), None)

    def clear(self) -> None:
        """Drop every cached entry."""
        self._cache.clear()
Exemplo n.º 6
0
class StackCache:
    """In-process stack cache.

    Keeps hot data in RAM (via a TTLCache) to improve concurrent
    request throughput.
    """

    def __init__(self, maxsize=0xff, ttl=60):
        self._cache = TTLCache(maxsize, ttl)

    def has(self, key):
        """Return True when key is cached and not expired."""
        return key in self._cache

    def get(self, key, default=None):
        """Return the cached value, or default when missing."""
        return self._cache.get(key, default)

    def set(self, key, val):
        """Store val under key."""
        self._cache[key] = val

    def incr(self, key, val=1):
        """Add val to the counter at key (missing counts as 0); return it."""
        total = self.get(key, 0) + val
        self.set(key, total)
        return total

    def decr(self, key, val=1):
        """Subtract val from the counter at key (missing counts as 0); return it."""
        total = self.get(key, 0) - val
        self.set(key, total)
        return total

    def delete(self, key):
        """Remove key; raises KeyError when absent."""
        del self._cache[key]

    def size(self):
        """Return the number of live entries."""
        return len(self._cache)

    def clear(self):
        """Evict everything."""
        return self._cache.clear()
Exemplo n.º 7
0
class NetEaseApi(object):
    """ NetEase Cloud Music API client.

    :param cookie_path: Local path for cookie persistence. When unset,
        cookies are not persisted.
    :param cache_path: Local cache path for network requests. When unset,
        responses are cached in memory only.
    :param cache_ttl: How long cached requests are kept.
    :param logger: Logger instance.
    """
    def __init__(self,
                 *,
                 cookie_path=None,
                 cache_path=None,
                 cache_ttl=300,
                 cache_size=100,
                 logger=None):
        self.session = requests.Session()
        self.logger = logger or logging.getLogger('NetEaseApi')

        # cookies persistent
        self._cookies_path = cookie_path
        self.session.cookies = _initialize_cookies(cookie_path)

        # cache persistent
        self._cache_path = cache_path
        self._cache_ttl = cache_ttl
        self._cache_size = cache_size
        if cache_path:
            # NOTE(review): TTLCacheP presumably persists entries to
            # cache_path — confirm against its definition.
            self.request_cache = TTLCacheP(cache_size, cache_ttl, cache_path)
        else:
            self.request_cache = TTLCache(cache_size, cache_ttl)

        # Every public call goes through this cached wrapper around _request.
        self.request = cached(self.request_cache, cache_key)(self._request)

        # get login status
        resp = self.get_user_account()
        self.profile: UserProfile = resp['profile']
        self.account: UserAccount = resp['account']

    @property
    def csrf_token(self) -> str:
        """Return the '__csrf' cookie value, or '' when not present."""
        for cookie in self.session.cookies:
            if cookie.name == '__csrf':
                return cookie.value
        return ''

    def _request(self,
                 method,
                 path,
                 params=None,
                 default=None,
                 custom_cookies=None,
                 raw=False):
        """ Send a request to the NetEase API.

        Prepares the csrf token, expands the API path into a full URL,
        and processes/stores cookies.

        Raises ``NetEaseError`` when the server response reports an error.

        :param method: HTTP method of the request.
        :param path: API path to request.
        :param params: Request parameters. Despite the name, they are sent
            as the request body (form) for both GET and POST.
        :param default: Default response content used when the body is empty
            or invalid. Note the ``message`` field is replaced with
            ``unable to decode response`` when the JSON cannot be parsed.
        :param custom_cookies: Extra cookies. Note these persist for the
            whole session until you log out or the server drops them.
        :return: The server's response content.
        """
        if not params:
            params = {}

        if not default:
            default = {'code': -1}

        if not custom_cookies:
            custom_cookies = {}

        endpoint = '{}{}'.format(c.BASE_URL, path)

        params.update({'csrf_token': self.csrf_token})
        data = default.copy()

        for key, value in custom_cookies.items():
            cookie = make_cookie(key, value)
            self.session.cookies.set_cookie(cookie)

        # All parameters go through the weapi encryption wrapper.
        params = encrypted_request(params)

        method = method.upper()
        if method == 'GET':
            # NOTE(review): the encrypted payload is sent as the body
            # (data=) even for GET — confirm the server expects this.
            resp = self.session.get(endpoint,
                                    headers=c.HEADERS,
                                    data=params,
                                    timeout=c.DEFAULT_TIMEOUT)
        elif method == 'POST':
            resp = self.session.post(endpoint,
                                     headers=c.HEADERS,
                                     data=params,
                                     timeout=c.DEFAULT_TIMEOUT)
        else:
            raise ValueError(f'unexpected method {method}')

        resp.raise_for_status()

        if not raw:
            try:
                data = resp.json()
            except json.JSONDecodeError:
                data['message'] = 'unable to decode response'

            raise_for_code(data)
            return data
        else:
            return resp.content

    def clear_cache(self):
        """ Clear the request cache.
        """
        self.request_cache.clear()

    def login_if_need(self, username: str, password: str, country_code: str = '86') -> \
            Union[LoginResp, GetUserAccountResp]:
        """ Log in only when needed.

        If already logged in, this is equivalent to calling get_user_account.

        If not logged in, it is equivalent to calling login.

        :param username: Email address or phone number.
        :param password: Password.
        :param country_code: Country calling code.
        :rtype: Union[LoginResp, GetUserAccountResp]
        """
        try:
            return self.get_user_account()
        except NetEaseError as e:
            # Code 301 means "not logged in"; anything else is a real error.
            if e.code == 301:
                return self.login(username, password, country_code)
            else:
                raise

    def login(self,
              username: str,
              password: str,
              country_code: str = '86') -> LoginResp:
        """ Log into a NetEase Cloud Music account.

        Supports both email and phone-number login; returns the login result.

        :param username: User name. A purely numeric string automatically
            uses the phone-number login endpoint.
        :param password: Password.
        :param country_code: Country calling code.
        :return: Response dict on success, raises otherwise.
        """
        username = username.strip()
        # The API expects the MD5 hex digest of the password, not plaintext.
        password = md5(password.encode('utf-8')).hexdigest()

        if self._cookies_path and os.path.isfile(self._cookies_path):
            self.session.cookies.load()

        if username.isdigit():
            path = '/weapi/login/cellphone'
            params = dict(
                phone=username,
                password=password,
                countrycode=country_code,
                rememberLogin='******',
            )
        else:
            # magic token for login
            # see https://github.com/Binaryify/NeteaseCloudMusicApi/blob/master/router/login.js#L15
            client_token = '1_jVUMqWEPke0/1/Vu56xCmJpo5vP1grjn_SOVVDzOc78w8OKLVZ2JH7IfkjSXqgfmh'
            path = '/weapi/login'
            params = dict(
                username=username,
                password=password,
                countrycode=country_code,
                rememberLogin='******',
                clientToken=client_token,
            )

        data = self.request('POST', path, params, custom_cookies={'os': 'pc'})
        if data.get('code', -1) == 200:
            self.profile = data['profile']
            self.account = data['account']

        if isinstance(self.session.cookies, LWPCookieJar):
            self.session.cookies.save()

        return LoginResp(data)

    def get_user_account(self) -> GetUserAccountResp:
        """Fetch the account/profile info for the current session."""
        return GetUserAccountResp(
            self.request('POST', '/api/nuser/account/get'))

    def logout(self):
        """ Log out.

        Clears all cookies and the user info held by this client.
        """
        self.profile = None
        self.account = None
        self.session.cookies = _initialize_cookies(self._cookies_path)

    def login_refresh(self):
        """ Refresh the login state.

        :return: Nothing.
        """
        self.request('GET', '/weapi/login/token/refresh', raw=True)

    def daily_task(self, is_mobile: bool = True):
        """ Daily check-in.

        :param is_mobile: Whether to check in as a mobile client.
        :return: Dict {point, code} on success, raises on error.
        """
        path = '/weapi/point/dailyTask'
        params = dict(type=0 if is_mobile else 1)
        return self.request('POST', path, params)

    def get_user_playlist(self,
                          uid=None,
                          offset=0,
                          limit=50) -> GetUserPlaylistResp:
        """ List a user's playlists.

        :param uid: User ID; when omitted, lists the current user's playlists.
        :param offset: Paging option, offset.
        :param limit: Paging option, max number of items per fetch.
        :return: Response dict on success.
        """
        if not uid and not self.profile:
            raise ValueError('需要登录')

        path = '/weapi/user/playlist'
        params = dict(uid=uid or self.profile.user_id,
                      offset=offset,
                      limit=limit)
        return GetUserPlaylistResp(self.request('POST', path, params))

    def get_recommend_resource(self) -> GetRecommendResourceResp:
        """ Get the list of daily recommended playlists.

        Note: this is not the daily song recommendation itself.

        For daily songs see :meth:`nemcore.api.NetEaseApi.get_recommend_songs`
        """
        path = '/weapi/v1/discovery/recommend/resource'
        return GetRecommendResourceResp(self.request('POST', path))

    def get_recommend_songs(self,
                            total: bool = True,
                            offset: int = 0,
                            limit: int = 20) -> GetRecommendSongsResp:
        """ Get the daily recommended songs.

        :param total: Unknown. Fetch everything at once?
        :param offset: Paging option, offset.
        :param limit: Paging option, max number of items per fetch.
        :return: Today's recommended song list.
        """
        if not self.profile:
            raise ValueError('需要登录')

        path = '/weapi/v1/discovery/recommend/songs'
        params = dict(total='true' if total else 'false',
                      offset=offset,
                      limit=limit)
        return GetRecommendSongsResp(self.request('POST', path, params))

    def get_personal_fm(self) -> GetPersonalFMResp:
        """ Personal FM.
        """
        path = '/weapi/v1/radio/get'
        return GetPersonalFMResp(self.request('POST', path))

    def fm_like(self,
                songid: int,
                like: bool = True,
                time: int = 25,
                alg: str = 'itembased') -> FMLikeResp:
        """ Personal FM action: like.

        :param songid: Song id.
        :param like: Like or unlike.
        :param time: Unknown.
        :param alg: Unknown. FM recommendation algorithm type?
        """
        path = '/weapi/radio/like'
        params = {
            'alg': alg,
            'trackId': songid,
            'like': 'true' if like else 'false',
            'time': time
        }
        return FMLikeResp(self.request('POST', path, params))

    def fm_trash(self,
                 songid: int,
                 time: int = 25,
                 alg: str = 'RT') -> FMTrashResp:
        """ Personal FM action: dislike (trash).

        NOTE: this API may affect future daily recommendations and FM results.
        :param songid: Song id.
        :param time: Unknown.
        :param alg: Unknown.
        """
        path = '/weapi/radio/trash/add'
        params = dict(
            songId=songid,
            alg=alg,
            time=time,
        )
        return FMTrashResp(self.request('POST', path, params))

    def search(self, keywords: str, stype: int = 1, total: bool = True, offset: int = 0, limit: int = 50) -> \
            SearchResp:
        """ Search songs.

        :param keywords: Search keywords.
        :param stype: Search type. One of: song(1), artist(100), album(10),
            playlist(1000), user(1002) *(type)*
        :param total: TODO
        :param offset: Search result offset, combined with limit for paging.
        :param limit: Search result page size, combined with offset for paging.
        """
        path = '/weapi/search/get'
        params = {
            's': keywords,
            'type': stype,
            'offset': offset,
            'total': total,
            'limit': limit
        }
        return SearchResp(self.request('POST', path, params))

    def get_new_albums(self,
                       offset: int = 0,
                       limit: int = 50) -> GetNewAlbumsResp:
        """ Newly released albums.

        :param offset: Result offset, combined with limit for paging.
        :param limit: Result page size, combined with offset for paging.
        """
        path = '/weapi/album/new'
        params = dict(
            area='ALL',
            offset=offset,
            total=True,
            limit=limit,
        )
        return GetNewAlbumsResp(self.request('POST', path, params))

    def get_top_playlists(self, category: str = '全部', order: str = 'hot', offset: int = 0, limit: int = 50) -> \
            GetTopPlaylistsResp:
        """ Playlists ("curated discs" picked by users).

        Corresponds to [Home >> Discover >> Playlists](http://music.163.com/#/discover/playlist/)

        :param category: Playlist category.
        :param order: Sort order, possible values TODO
        :param offset: Result offset, combined with limit for paging.
        :param limit: Result page size, combined with offset for paging.
        """
        path = '/weapi/playlist/list'
        params = {
            'cat': category,
            'order': order,
            'offset': offset,
            'total': 'true',
            'limit': limit
        }
        return GetTopPlaylistsResp(self.request('POST', path, params))

    def get_playlist_detail(self,
                            playlist_id: int,
                            offset=0,
                            limit=100) -> GetPlaylistDetailResp:
        """ Playlist details.

        :param limit: Used for paging.
        :param offset: Used for paging.
        :param playlist_id: Playlist ID.
        """
        path = '/weapi/v3/playlist/detail'
        params = {
            'id': playlist_id,
            'total': 'true',
            'limit': limit,
            'n': 1000,
            # NOTE(review): 'offest' (sic) looks like a typo carried over
            # from upstream clients — confirm what the server expects
            # before renaming it.
            'offest': offset,
        }
        # Add an 'os' field to the cookie.
        custom_cookies = dict(os=platform.system())
        return GetPlaylistDetailResp(
            self.request('POST', path, params, {'code': -1}, custom_cookies))

    def get_top_artists(self,
                        offset: int = 0,
                        limit: int = 100) -> GetTopArtistsResp:
        """ Popular artists.

        Corresponds to [Home >> Discover >> Artists](https://music.163.com/#/discover/artist/)

        :param offset: Result offset, combined with limit for paging.
        :param limit: Result page size, combined with offset for paging.
        """
        path = '/weapi/artist/top'
        params = {'offset': offset, 'total': True, 'limit': limit}
        return GetTopArtistsResp(self.request('POST', path, params))

    def get_top_songs(self,
                      idx: int = 0,
                      offset: int = 0,
                      limit: int = 100) -> GetPlaylistDetailResp:
        """ Top songs chart.

        Corresponds to [Home >> Discover >> Charts](https://music.163.com/#/discover/toplist?id=3779629)

        :param idx: Chart ID, see netease.TOP_LIST_ALL
        :param offset: Result offset, combined with limit for paging.
        :param limit: Result page size, combined with offset for paging.
        """
        playlist_id = c.TOP_LIST_ALL[idx][1]
        return self.get_playlist_detail(playlist_id,
                                        offset=offset,
                                        limit=limit)

    def get_artist_info(self, artist_id: int) -> GetArtistInfoResp:
        """ Get artist info.

        Includes hot songs, etc.

        :param artist_id: Artist ID.
        """
        path = '/weapi/v1/artist/{}'.format(artist_id)
        return GetArtistInfoResp(self.request('POST', path))

    def get_artist_albums(self,
                          artist_id: int,
                          offset: int = 0,
                          limit: int = 50) -> GetArtistAlbumsResp:
        """ Get an artist's albums.

        :param artist_id: Artist ID.
        :param offset: Result offset, combined with limit for paging.
        :param limit: Result page size, combined with offset for paging.
        """
        path = '/weapi/artist/albums/{}'.format(artist_id)
        params = dict(offset=offset, total=True, limit=limit)
        return GetArtistAlbumsResp(self.request('POST', path, params))

    def get_album_info(self, album_id: int) -> GetAlbumInfoResp:
        """ Get album info.

        :param album_id: Album ID.
        """
        path = '/weapi/v1/album/{}'.format(album_id)
        return GetAlbumInfoResp(self.request('POST', path))

    def get_song_comments(self, music_id: int, total: bool = False, offset: int = 0, limit: int = 100) -> \
            GetSongCommentsResp:
        """ Get a song's comments.

        :param music_id: Song ID.
        :param offset: Result offset, combined with limit for paging.
        :param total: TODO
        :param limit: Result page size, combined with offset for paging.
        """
        path = '/weapi/v1/resource/comments/R_SO_4_{}/'.format(music_id)
        params = {
            'rid': music_id,
            'offset': offset,
            'total': 'true' if total else 'false',
            'limit': limit
        }
        return GetSongCommentsResp(self.request('POST', path, params))

    def get_songs_detail(self, ids: Sequence[int]) -> GetSongDetailResp:
        """ Get song details.

        :param ids: List of song IDs.
        """
        path = '/weapi/v3/song/detail'
        params = {
            'c': json.dumps([dict(id=_id) for _id in ids]),
            'ids': json.dumps(ids)
        }
        return GetSongDetailResp(self.request('POST', path, params))

    def get_songs_url(self, ids: Sequence[int],
                      quality: int) -> GetSongURLResp:
        """ Get playback URLs for songs.

        :param ids: List of song ids.
        :param quality: Quality tier, 0-2, corresponding to 320kbps,
            192kbps and 128kbps respectively.
        """
        rate_map = {0: 320000, 1: 192000, 2: 128000}

        path = '/weapi/song/enhance/player/url'
        params = dict(ids=ids, br=rate_map[quality])
        return GetSongURLResp(self.request('POST', path, params))

    def get_song_lyric(self, music_id: int) -> GetSongLyricResp:
        """ Get lyrics.

        ApiEndpoint: http://music.163.com/api/song/lyric?os=osx&id= &lv=-1&kv=-1&tv=-1

        :param music_id: Song ID.
        """
        path = '/weapi/song/lyric'
        params = dict(os='osx', id=music_id, lv=-1, kv=-1, tv=-1)
        return GetSongLyricResp(self.request('POST', path, params))

    def get_dj_channels(self,
                        offset: int = 0,
                        limit: int = 50) -> GetDjChannelsResp:
        """ Popular DJ radio channels.

        Hottest today(0), hottest this week(10), hottest ever(20), newest(30)

        Corresponds to [Home >> Discover >> DJ Radio](https://music.163.com/#/discover/djradio)

        :param offset: Paging option, offset.
        :param limit: Paging option, max number of items per fetch.
        """
        path = '/weapi/djradio/hot/v1'
        params = {'limit': limit, 'offset': offset}
        return GetDjChannelsResp(self.request('POST', path, params))

    def get_dj_programs(self,
                        radio_id: int,
                        asc: bool = False,
                        offset: int = 0,
                        limit: int = 50) -> GetDjProgramsResp:
        """Get the programs of a radio channel.

        :param radio_id: id
        :param asc: Sort ascending, defaults to False
        :param offset: Paging option, offset, defaults to 0
        :param limit: Paging option, item limit per fetch, defaults to 50
        :return: Radio channel programs.
        """
        path = '/weapi/dj/program/byradio'
        params = dict(asc=asc, radioId=radio_id, offset=offset, limit=limit)
        return GetDjProgramsResp(self.request('POST', path, params))
Exemplo n.º 8
0
class MicrosoftGraphHelper:
    """Thin wrapper around the Microsoft Graph API.

    Caches the OAuth access token and retries a request once after
    refreshing the token on a 401.
    """

    def __init__(self, tenant_id, client_id, client_secret):
        self.__cache = TTLCache(
            maxsize=1, ttl=55 *
            60)  # Set to expire after 55 minutes so it is always fresh

        self.__tenant_id = tenant_id
        # BUG FIX: the original line ended with a stray comma, which stored
        # a one-element tuple instead of the client id string and produced
        # an invalid token request payload.
        self.__client_id = client_id
        self.__client_secret = client_secret
        # Prime the cache with an access token up front.
        self.__get_cache('microsoft_security_graph_access_token')

    def __set_cache(self, d):
        """Merge the mapping d into the token cache."""
        # TTLCache.update accepts an iterable of (key, value) pairs; the
        # original duplicated this loop inside a redundant try/except
        # AttributeError whose handler was identical to the try body.
        self.__cache.update(d.items())

    def __get_cache(self, key):
        """Return the cached value for key, refreshing the token on a miss."""
        if key not in self.__cache:
            self.__set_cache({key: self.__refresh_access_token()})
        return self.__cache.get(key)

    def __refresh_access_token(self):
        """Request a new client-credentials access token from Azure AD."""
        token_url = 'https://login.microsoftonline.com/{}/oauth2/v2.0/token'.format(
            self.__tenant_id)
        post_data = {
            "client_id": self.__client_id,
            "scope": ["https://graph.microsoft.com/.default"],
            "client_secret": self.__client_secret,
            "grant_type": "client_credentials"
        }
        r = requests.post(token_url, data=post_data)
        log.debug(r.content)
        payload = r.json()  # renamed from `json` to avoid shadowing the module
        return payload.get("access_token")

    def check_status_code(self, response):
        """Return True on 2xx; on 401 refresh the token and return False.

        Raises ValueError for any other status code.
        """
        if 200 <= response.status_code <= 299:
            return True
        # Access token has expired, request a new one
        elif response.status_code == 401:
            log.debug(response.content)
            access_token = self.__refresh_access_token()
            self.__set_cache(
                {"microsoft_security_graph_access_token": access_token})
            return False
        else:
            raise ValueError("Invalid response from Microsoft Security Graph")

    def microsoft_graph_request(self, method, url, headers, json=None):
        """Issue a GET/PATCH request, retrying once after a 401 token refresh.

        Returns the response object, or False when the retry also fails.
        """
        r = None
        for attempt in range(2):
            if method == "GET":
                r = requests.get(url, headers=headers)
            elif method == "PATCH":
                r = requests.patch(url, headers=headers, json=json)
            else:
                raise ValueError("{} not implemented.".format(method))

            if self.check_status_code(r):
                break
            # If it fails a second time, something more serious is wrong, ie: creds, query, etc.
            elif attempt == 1:
                log.info(r.content)
                return False
        return r

    def get_access_token(self):
        """Return the (cached) Graph access token."""
        return self.__get_cache("microsoft_security_graph_access_token")

    def clear_cache(self):
        """Drop the cached token; the next call will refresh it."""
        self.__cache.clear()
Exemplo n.º 9
0
class FeishuBot:
    """Async client for the Feishu (Lark) open API.

    Holds app credentials and caches the tenant access token for
    ``token_ttl`` seconds (default: one hour).
    """
    def __init__(self,
                 app_id,
                 app_secret,
                 base_url='https://open.feishu.cn/open-apis',
                 token_ttl=None):
        self.app_id = app_id
        self.app_secret = app_secret
        self.base_url = base_url
        # Single-slot cache holding the tenant access token.
        self.token_cache = TTLCache(1, token_ttl or timedelta(hours=1).seconds)

    # Retry up to 3 times (1s apart) when the token expired mid-flight.
    @retry(stop=stop_after_attempt(3),
           wait=wait_fixed(1),
           retry=retry_if_exception_type(TokenExpiredError))
    async def request(self, method, endpoint, *args, **kwargs):
        """Send an API request and return the decoded JSON body.

        Attaches the bearer token unless ``no_auth=True`` is passed.
        Raises TokenExpiredError (clearing the cache, which triggers a
        retry with a fresh token) on code 99991663, RequestError on any
        other non-zero code.
        """
        url = f'{self.base_url}{endpoint}'
        no_auth = kwargs.pop('no_auth', False)
        if no_auth:
            # skip auth, for getting token itself
            headers = kwargs.pop('headers', {})
        else:
            # attach the token by default
            token = await self.get_access_token()
            headers = {
                'Authorization': f'Bearer {token}',
                **kwargs.pop('headers', {})
            }

        async with ClientSession() as session:
            async with session.request(method,
                                       url,
                                       *args,
                                       headers=headers,
                                       **kwargs) as resp:
                resp_json = await resp.json()

        code = resp_json['code']
        msg = resp_json['msg']

        if code > 0:
            # documentation: https://open.feishu.cn/document/ukTMukTMukTM/ugjM14COyUjL4ITN
            if code == 99991663:
                # tenant access token error
                # invalidate the cache and retry again
                self.token_cache.clear()
                raise TokenExpiredError(code, msg)
            raise RequestError(code, msg)

        logger.debug(f'requested: url={url} response={resp_json}')

        return resp_json

    async def get(self, endpoint, *args, **kwargs):
        """Convenience wrapper for GET requests."""
        return await self.request('GET', endpoint, *args, **kwargs)

    async def post(self, endpoint, *args, **kwargs):
        """Convenience wrapper for POST requests."""
        return await self.request('POST', endpoint, *args, **kwargs)

    # refresh every 1 hour
    async def get_access_token(self):
        """Return the tenant access token, fetching and caching it on miss."""
        cached_token = self.token_cache.get(keys.hashkey(self))

        if cached_token:
            return cached_token

        url = f'/auth/v3/app_access_token/internal/'
        resp = await self.post(url,
                               no_auth=True,
                               json={
                                   'app_id': self.app_id,
                                   'app_secret': self.app_secret
                               })
        token = resp['tenant_access_token']
        self.token_cache[keys.hashkey(self)] = token

        return token

    # NOTE(review): cachetools.cached on an async function caches the
    # coroutine object itself; awaiting a cached coroutine a second time
    # raises RuntimeError — confirm and consider an async-aware cache.
    # The same concern applies to get_groups below.
    @cached(TTLCache(32, timedelta(days=1).seconds))
    async def get_user_detail(self, open_id: str):
        """Return user info for the given open_id (first batch result)."""
        url = f'/contact/v1/user/batch_get'
        resp = await self.get(url, params={'open_ids': open_id})
        return resp['data']['user_infos'][0]

    # refresh every 5 minutes
    @cached(TTLCache(1, timedelta(minutes=5).seconds))
    async def get_groups(self):
        """Return the list of group chats the bot belongs to."""
        resp = await self.get('/chat/v4/list')
        return resp['data']['groups']

    async def update_group_name(self, chat_id: str, new_name: str):
        """
        Update group name
        """
        resp = await self.post('/chat/v4/update/',
                               json={
                                   'chat_id': chat_id,
                                   'name': new_name
                               })

        return resp

    async def send_to_groups(self,
                             msg_type,
                             content=None,
                             card=None,
                             **kwargs):
        """Send one message of msg_type to every group, concurrently.

        Returns zip(groups, results) pairing each group with its response.
        """
        groups = await self.get_groups()
        tasks = []
        for g in groups:
            payload = {
                'chat_id': g['chat_id'],
                'msg_type': msg_type,
            }
            if card is not None:
                payload['card'] = card
                # NOTE(review): raises KeyError when a card is sent without
                # an is_shared kwarg — confirm callers always pass it.
                payload['update_multi'] = kwargs['is_shared']
            else:
                payload['content'] = content
            tasks.append(self.post('/message/v4/send/', json=payload))

        results = await asyncio.gather(*tasks)

        return zip(groups, results)

    async def send_text(self, text: str):
        """
        Send plain text
        """
        results = await self.send_to_groups('text', {'text': text})
        logger.debug(f'Sent text={text} to {[g["name"] for g, _ in results]}')

    async def upload_image(self, url):
        """
        Upload image of the given url
        """
        async with ClientSession() as session:
            img_resp = await session.get(url)
            b = await img_resp.content.read()

        resp = await self.post('/image/v4/put/',
                               data={
                                   'image_type': 'message',
                                   'image': b
                               })

        image_key = resp['data']['image_key']
        logger.debug(f'uploaded image: url={url} image_key={image_key}')

        return image_key

    async def send_image(self, image_url):
        """
        Send image
        """
        image_key = await self.upload_image(image_url)
        results = await self.send_to_groups('image', {'image_key': image_key})
        logger.debug(
            f'Sent image_url={image_url} to {[g["name"] for g, _ in results]}')

    async def send_post(self, title, content):
        """
        Send post(image+text)
        documentation: https://open.feishu.cn/document/ukTMukTMukTM/uMDMxEjLzATMx4yMwETM
        """
        results = await self.send_to_groups(
            'post', {'post': {
                'zh_cn': {
                    'title': title,
                    'content': content
                }
            }})
        logger.debug(
            f'Sent title={title} to {[g["name"] for g, _ in results]}')

    async def send_card(self, card, is_shared=False):
        """
        Send interactive card
        documentation: https://open.feishu.cn/document/ukTMukTMukTM/ugTNwUjL4UDM14CO1ATN 
        """
        assert isinstance(card, dict)
        results = await self.send_to_groups('interactive',
                                            card=card,
                                            is_shared=is_shared)
        logger.debug(f'Sent {card} to {[g["name"] for g, _ in results]}')
Exemplo n.º 10
0
                sender_id, cfg.FanpageToken))

        res = conn.getresponse()
        data = res.read()
        response = data.decode("utf-8")
        obj = json.loads(response)
        return len(obj['data']) > 0
    except Exception:
        logging.warning(
            "Error one get_info, sender_id[{}] response[{}] traceback[{}]".
            format(sender_id, response, traceback.format_exc()))
        return False


# NOTE(review): `lock` and `cache` are module-level globals defined
# elsewhere in this file; clearing under the lock presumably keeps
# concurrent handlers from reading a half-cleared cache — confirm intent.
with lock:
    cache.clear()


# Build the Vietnamese form of address from the gender reported by Facebook.
def make_viet_name_gender(value, is_upper):
    """Return the Vietnamese honorific for a Facebook gender value.

    "male" -> "Anh", "female" -> "Chị" (lower-cased unless is_upper);
    any other value falls back to the neutral "Anh/Chị".
    """
    honorifics = {"male": "Anh", "female": "Chị"}
    if value not in honorifics:
        return "Anh/Chị"
    title = honorifics[value]
    return title if is_upper else title.lower()
Exemplo n.º 11
0
class NodeBase:
    """Abstract ROS Node class with additional functionality

    Args:
        name (str): Name of the node
        parameter_cache_time (int) = 1: Duration for which parameters will be cached, for performance
        log_level (int) = rospy.INFO: Loglevel with which the node works.

    A basic node with a subscriber and publisher can be created in the following way:

    >>> from simulation.utils.ros_base.node_base import NodeBase
    >>> class NodeSubClass(NodeBase):
    ...     def __init__(self):
    ...         super(NodeSubClass,self).__init__(name="node_name") # Important!
    ...         # ...
    ...         self.run() # Calls .start() if self.param.active is True (default: True)
    ...     def start(self):
    ...         # self.subscriber = ...
    ...         # self.publisher = ...
    ...         super().start() # Make sure to call this!
    ...     # Called when deactivating the node by setting self.param.active to false
    ...     # E.g. through command line with: rosparam set .../node/active false
    ...     # or when ROS is shutting down
    ...     def stop(self):
    ...         # self.subscriber.unregister()
    ...         # self.publisher.unregister()
    ...         super().stop() # Make sure to call this!

    Attributes:
        param (ParameterObject): Attribute of type :class:`ParameterObject`,
            which provides an abstraction layer to access ROS parameters.
            The following line shows how to access a ROS parameter called *param_1* in any subclass:

            >>> self.param.param_1  # doctest: +SKIP
            \'value_1\'

            This is equivalent to:

            >>> rospy.get_param(\'~param_1\')  # doctest: +SKIP
            'value_1'

            Setting a parameter is equally easy:

            >>> self.param.param_2 = \'value_2\'  # doctest: +SKIP

            This is equivalent to:

            >>> rospy.set_param(\'~param_2\', \'value_2\')  # doctest: +SKIP

            The magic begins when parameters are defined in a hierarchical structure.
            After starting a node with the following YAML parameter file:

            .. highlight:: yaml
            .. code-block:: yaml

                car:
                    name: 'dr_drift'
                    size:
                        length: 0.6
                        width: 0.4
                ...

            the cars dimensions can be retrieved just like any other python attribute:

            >>> self.param.car.size.length  # doctest: +SKIP
            0.6

            and changes are also synchronized with ROS:

            >>> rospy.get_param(\"~car/name\")  # doctest: +SKIP
            \'dr_drift\'
            >>> self.param.car.name = \'captain_rapid\'  # doctest: +SKIP
            >>> rospy.get_param(\"~car/name\")  # doctest: +SKIP
            \'captain_rapid\'
    """
    def __init__(self,
                 *,
                 name: str,
                 parameter_cache_time: float = 1,
                 log_level: int = rospy.INFO):

        rospy.init_node(name, log_level=log_level)

        # Parameters
        # Parameter reads are cached for `parameter_cache_time` seconds to
        # avoid a ROS master round trip on every access.
        self._parameter_cache = TTLCache(maxsize=128, ttl=parameter_cache_time)
        self.param = ParameterObject(ns="~",
                                     set_param_func=self._set_param,
                                     get_param_func=self._get_param)

        # Node is not yet active
        self.__active = False

        # Always call stop on shutdown!
        rospy.on_shutdown(self.__shutdown)

        # Node is by default active
        # EAFP: only write the default if the parameter is not already set.
        try:
            self.param.active
        except KeyError:
            self.param.active = True

    def __shutdown(self):
        """Called when ROS is shutting down.

        If the node was active before, self.stop is called.
        """
        if self.__active:
            self.__active = False
            self.stop()

    def _get_param(self, key: str) -> Any:
        """Get (possibly) cached ROS parameter.

        Arguments:
            key (str): Name of the ROS parameter

        Returns:
            If the parameter is in the parameter cache, the cached value is returned.
            Otherwise rospy.get_param(key) is returned.
        """
        # Cached version of rospy.get_param:
        # The wrapper is recreated per call, but all wrappers share
        # self._parameter_cache, so cached values persist between calls.
        get_cached_param = cached(cache=self._parameter_cache)(rospy.get_param)

        # request param
        return get_cached_param(key)

    def _set_param(self, key: str, value: Any):
        """Set ROS parameter.
        Also the parameter cache is cleared, to prevent incoherence.

        Arguments:
            key (str): Name of the ROS parameter
            value (Any): New value
        """

        # To ensure that there are no cache conflicts
        self._parameter_cache.clear()

        # Set the parameter
        rospy.set_param(key, value)

    def run(self, *, function: Callable = None, rate: float = 1):
        """Helper function, starting the node and shutting it down once ROS signals to.
        Can only be called if the subclass implements start and stop functions.

        Args:
            rate (float): Rate with which to update active/ not active status of the node
            function: Called with a frequency of ``rate`` when node is active
        """
        # Note: `rate` is rebound from a float to a rospy.Rate object here.
        rate = rospy.Rate(rate)
        while not rospy.is_shutdown():
            # Node should be active, but is not.
            if self.param.active and not self.__active:
                self.__active = True
                self.start()
                rospy.loginfo(f"Activating {rospy.get_name()}")
            elif not self.param.active and self.__active:
                self.__active = False
                self.stop()
                rospy.loginfo(f"Deactivating {rospy.get_name()}")

            if self.__active and function:
                function()
            rate.sleep()

    def start(self):
        """Called when activating the node."""
        pass

    def stop(self):
        """Called when deactivating or shutting down the node."""
        pass
Exemplo n.º 12
0
class MemoCache(Cache):
    """Manages cached values for a single st.memo-ized function.

    Values are stored as pickled bytes in a TTL-bounded in-memory cache
    and, when ``persist == "disk"``, mirrored to files on disk.
    """
    def __init__(
        self,
        key: str,
        persist: Optional[str],
        max_entries: float,
        ttl: float,
        display_name: str,
    ):
        self.key = key
        self.display_name = display_name
        self.persist = persist
        self._mem_cache = TTLCache(maxsize=max_entries,
                                   ttl=ttl,
                                   timer=_TTLCACHE_TIMER)
        # Guards every access to _mem_cache; TTLCache is not thread-safe.
        self._mem_cache_lock = threading.Lock()

    @property
    def max_entries(self) -> float:
        return cast(float, self._mem_cache.maxsize)

    @property
    def ttl(self) -> float:
        return cast(float, self._mem_cache.ttl)

    def get_stats(self) -> List[CacheStat]:
        """Return one CacheStat per cached entry (byte length of the pickle)."""
        stats: List[CacheStat] = []
        with self._mem_cache_lock:
            # Snapshot the values: TTLCache can evict expired entries while
            # being iterated, which would break direct iteration.
            for item_value in list(self._mem_cache.values()):
                stats.append(
                    CacheStat(
                        category_name="st_memo",
                        cache_name=self.display_name,
                        byte_length=len(item_value),
                    ))
        return stats

    def read_value(self, key: str) -> Any:
        """Read a value from the cache. Raise `CacheKeyNotFoundError` if the
        value doesn't exist, and `CacheError` if the value exists but can't
        be unpickled.
        """
        try:
            pickled_value = self._read_from_mem_cache(key)

        except CacheKeyNotFoundError as e:
            if self.persist == "disk":
                # Fall back to the disk copy and repopulate the memory cache.
                pickled_value = self._read_from_disk_cache(key)
                self._write_to_mem_cache(key, pickled_value)
            else:
                raise e

        try:
            return pickle.loads(pickled_value)
        except pickle.UnpicklingError as exc:
            raise CacheError(f"Failed to unpickle {key}") from exc

    def write_value(self, key: str, value: Any) -> None:
        """Write a value to the cache. It must be pickleable."""
        try:
            pickled_value = pickle.dumps(value)
        except pickle.PicklingError as exc:
            raise CacheError(f"Failed to pickle {key}") from exc

        self._write_to_mem_cache(key, pickled_value)
        if self.persist == "disk":
            self._write_to_disk_cache(key, pickled_value)

    def clear(self) -> None:
        with self._mem_cache_lock:
            # We keep a lock for the entirety of the clear operation to avoid
            # disk cache race conditions.
            # Snapshot the keys: TTLCache may expire (and thus mutate) while
            # iterated, which would raise RuntimeError mid-loop.
            for key in list(self._mem_cache.keys()):
                self._remove_from_disk_cache(key)

            self._mem_cache.clear()

    def _read_from_mem_cache(self, key: str) -> bytes:
        with self._mem_cache_lock:
            if key in self._mem_cache:
                entry = bytes(self._mem_cache[key])
                _LOGGER.debug("Memory cache HIT: %s", key)
                return entry

            else:
                _LOGGER.debug("Memory cache MISS: %s", key)
                raise CacheKeyNotFoundError("Key not found in mem cache")

    def _read_from_disk_cache(self, key: str) -> bytes:
        path = self._get_file_path(key)
        try:
            # Renamed from `input` to avoid shadowing the builtin.
            with streamlit_read(path, binary=True) as input_file:
                value = input_file.read()
                _LOGGER.debug("Disk cache HIT: %s", key)
                return bytes(value)
        except FileNotFoundError:
            raise CacheKeyNotFoundError("Key not found in disk cache")
        except BaseException as e:
            _LOGGER.error(e)
            raise CacheError("Unable to read from cache") from e

    def _write_to_mem_cache(self, key: str, pickled_value: bytes) -> None:
        with self._mem_cache_lock:
            self._mem_cache[key] = pickled_value

    def _write_to_disk_cache(self, key: str, pickled_value: bytes) -> None:
        path = self._get_file_path(key)
        try:
            with streamlit_write(path, binary=True) as output:
                output.write(pickled_value)
        except util.Error as e:
            _LOGGER.debug(e)
            # Clean up file so we don't leave zero byte files.
            try:
                os.remove(path)
            except (FileNotFoundError, IOError, OSError):
                pass
            raise CacheError("Unable to write to cache") from e

    def _remove_from_disk_cache(self, key: str) -> None:
        """Delete a cache file from disk. If the file does not exist on disk,
        return silently. If another exception occurs, log it. Does not throw.
        """
        path = self._get_file_path(key)
        try:
            os.remove(path)
        except FileNotFoundError:
            pass
        except BaseException:
            # exception() already appends the traceback; the previous extra
            # positional arg had no %-placeholder and broke log formatting.
            _LOGGER.exception("Unable to remove a file from the disk cache")

    def _get_file_path(self, value_key: str) -> str:
        """Return the path of the disk cache file for the given value."""
        return get_streamlit_file_path(_CACHE_DIR_NAME,
                                       f"{self.key}-{value_key}.memo")
Exemplo n.º 13
0
class YDiskLibrary(backend.LibraryProvider):
    """Mopidy library provider exposing one or more Yandex.Disk accounts."""

    root_directory = Ref.directory(uri=ROOT_URI, name='Yandex.Disk')
    disks = {}

    def __init__(self, backend, config):
        super(YDiskLibrary, self).__init__(backend)

        ext_config = config[Extension.ext_name]

        def init():
            # Deferred: network-touching disk setup runs in init(), not here.
            user_agent = get_user_agent()
            proxy = get_proxy(config)
            self.disks = {
                disk.id: disk
                for disk in (
                    YDisk(token=token, proxy=proxy, user_agent=user_agent)
                    for token in ext_config['tokens']
                )
            }
            logger.info(
                'YDisks initialized: %s',
                ', '.join(iterkeys(self.disks)) or '[none]'
            )

        self._browse_cache = TTLCache(maxsize=1000, ttl=30 * 60)
        self._init = init
        # Always define _tagger: dispose() reads it unconditionally, which
        # previously raised AttributeError when tagging_mode was disabled.
        self._tagger = None
        if ext_config['tagging_mode'] > 0:
            self._tagger = Tagger.start(config).proxy()

    def init(self):
        """Run the deferred disk initialization."""
        self._init()

    def dispose(self):
        """Release all disks, stop the tagger (if any) and drop caches."""
        for disk in itervalues(self.disks):
            disk.dispose()
        if self._tagger:
            self._tagger.stop()
        self._browse_cache.clear()

    @cachedmethod(cache=lambda self: self._browse_cache, key=lambda uri: uri)
    def browse(self, uri):
        """List either the configured disks (at the root) or a disk directory."""
        if uri == ROOT_URI:
            return [
                Ref.directory(uri=ROOT_URI + disk.id, name=disk.name)
                for disk in itervalues(self.disks)
            ]
        else:
            disk_id, _, dir_path = _resource_coords(uri)
            disk = self.disks[disk_id]
            return [
                YDiskLibrary._make_ref(disk_id, resource)
                for resource in disk.browse_dir(dir_path)
            ]

    def lookup(self, uri):
        """Resolve a track URI, enriching it via the tagger when enabled."""
        disk_id, file_name, file_path = _resource_coords(uri)
        disk = self.disks[disk_id]
        track = Track(uri=uri, name=file_name)
        if self._tagger:
            track_f = self._tagger.get_track(
                uri, lambda: disk.get_file(file_path)
            )
            track = track_f.get() or track
        return [track]

    def get_images(self, uris):
        """Images are not supported; always return an empty mapping."""
        return {}

    @staticmethod
    def _make_ref(disk_id, resource):
        """Build a directory or track Ref for a disk resource."""
        resource_uri = (furl(ROOT_URI) / disk_id / resource.path).url
        if isinstance(resource, YDiskDirectory):
            return Ref.directory(uri=resource_uri, name=resource.name)
        else:
            return Ref.track(uri=resource_uri, name=resource.name)
Exemplo n.º 14
0
class AutoTTLCache(MutableMapping):
    """Thread-safe mapping backed by :class:`cachetools.TTLCache`.

    Every operation takes an internal lock; a ``CacheMonitor`` periodically
    calls :meth:`expire` to evict stale entries.
    """
    def __init__(self,
                 items=None,
                 *,
                 maxsize,
                 ttl,
                 timer=time.monotonic,
                 getsizeof=None):
        self._cache_lock = threading.Lock()
        self._cache = TTLCache(maxsize, ttl, timer=timer, getsizeof=getsizeof)
        if items is not None:
            self._cache.update(items)
        self._monitor = CacheMonitor(self)

    @property
    def ttl(self):
        with self._cache_lock:
            return self._cache.ttl

    @property
    def maxsize(self):
        with self._cache_lock:
            return self._cache.maxsize

    @property
    def timer(self):
        with self._cache_lock:
            return self._cache.timer

    def expire(self):
        """Evict all entries whose TTL has elapsed."""
        with self._cache_lock:
            self._cache.expire()

    def __contains__(self, key):
        with self._cache_lock:
            return key in self._cache

    def __setitem__(self, k, v):
        with self._cache_lock:
            self._cache[k] = v

    def __delitem__(self, k):
        with self._cache_lock:
            del self._cache[k]

    def __getitem__(self, k):
        with self._cache_lock:
            return self._cache[k]

    def __len__(self) -> int:
        with self._cache_lock:
            return len(self._cache)

    def __iter__(self):
        # Snapshot the keys under the lock, then yield outside it so
        # consumers cannot deadlock by mutating while iterating.
        with self._cache_lock:
            keys = list(self._cache)
        yield from keys

    # TODO: __reduce__ and __setstate__

    def __repr__(self):
        return f"{type(self).__name__}(max_size={self.maxsize}, ttl={self.ttl})"

    def clear(self):
        with self._cache_lock:
            self._cache.clear()

    def get(self, *args, **kwargs):
        with self._cache_lock:
            # Bug fix: the result was previously dropped (missing return).
            return self._cache.get(*args, **kwargs)

    def pop(self, *args, **kwargs):
        with self._cache_lock:
            # Bug fix: the result was previously dropped (missing return).
            return self._cache.pop(*args, **kwargs)

    def setdefault(self, *args, **kwargs):
        with self._cache_lock:
            # Bug fix: the result was previously dropped (missing return).
            return self._cache.setdefault(*args, **kwargs)

    def popitem(self):
        with self._cache_lock:
            # Bug fix: the result was previously dropped (missing return).
            return self._cache.popitem()
Exemplo n.º 15
0
class WebDAVFS(FS):
    """PyFilesystem2 filesystem backed by a remote WebDAV server."""

    # Filesystem capabilities advertised to the PyFilesystem2 framework.
    _meta = {
        'case_insensitive': False,
        'invalid_path_chars': '\0',
        'network': True,
        'read_only': False,
        'thread_safe': True,
        'unicode_paths': True,
        'virtual': False,
    }

    def __init__(self, url, login=None, password=None, root=None,
                 cache_maxsize=10000, cache_ttl=60):
        """Create the filesystem and the underlying WebDAV client.

        Remote metadata is cached in ``info_cache`` (TTL-bounded).
        """
        self.url = url
        self.root = root
        super(WebDAVFS, self).__init__()

        self.info_cache = TTLCache(maxsize=cache_maxsize, ttl=cache_ttl)
        self.client = wc.Client({
            'webdav_hostname': self.url,
            'webdav_login': login,
            'webdav_password': password,
            'root': self.root
        })

    def _create_resource(self, path):
        """Wrap *path* in a WebDAV resource bound to this client."""
        return wc.Resource(self.client, wu.Urn(path))

    def get_resource(self, path):
        """Return a WebDAV resource for *path* (UTF-8 encoded)."""
        encoded_path = path.encode('utf-8')
        return self._create_resource(encoded_path)

    @staticmethod
    def _create_info_dict(info):
        """Convert a raw WebDAV property mapping into a PyFilesystem
        ``Info`` raw dict with ``basic``/``details``/``access``/``other``
        namespaces.

        NOTE(review): assumes ``basics``/``details``/``access`` and
        ``epoch`` are module-level constants — confirm against the
        module header (outside this view).
        """
        info_dict = {
            'basic': {"is_dir": False},
            'details': {'type': int(ResourceType.file)},
            'access': {},
            'other': {}  # indent fixed: the line previously mixed a tab with spaces
        }

        # On Python 2 the client may hand back raw bytes; decode them.
        if six.PY2:
            def decode(s):
                return s.decode('utf-8') if isinstance(s, bytes) else s
        else:
            def decode(s):
                return s

        def decode_datestring(s):
            # Parse a date string into seconds since the epoch.
            dt = dateutil.parser.parse(s)
            return (dt - epoch).total_seconds()

        timestamp_keys = ('modified', 'created', 'accessed')
        for key, val in six.iteritems(info):
            if key in basics:
                info_dict['basic'][key] = decode(val)
            elif key in details:
                if key in timestamp_keys:
                    # Timestamps arrive as date strings; convert to epoch
                    # seconds, normalizing empty values to None.
                    if val:
                        val = decode_datestring(val)
                    info_dict['details'][key] = decode(val) or None
                else:
                    if key == 'size' and val:
                        val = int(val)
                    info_dict['details'][key] = decode(val)
            elif key in access:
                info_dict['access'][key] = decode(val)
            else:
                info_dict['other'][key] = decode(val)

        return info_dict

    def create(self, path, wipe=False):
        """Create an empty file at *path*.

        Returns False (leaving the file untouched) when it already exists
        and *wipe* is not set; otherwise truncates/creates and returns True.
        """
        with self._lock:
            if not wipe and self.exists(path):
                return False
            with self.openbin(path, 'wb') as handle:
                handle.truncate(0)
        return True

    def exists(self, path):
        """Return True if *path* exists on the remote server."""
        validated = self.validatepath(path)
        return self.client.check(validated.encode('utf-8'))

    def getinfo(self, path, namespaces=None):
        """Return an ``Info`` object for *path*.

        Server responses are cached in ``self.info_cache``; asking for the
        root clears the cache so a fresh listing is fetched.

        Raises:
            errors.ResourceNotFound: if the path does not exist remotely.
        """
        # The path was previously validated twice; once is enough.
        _path = self.validatepath(path)
        if _path in '/':
            self.info_cache.clear()
        try:
            namespaces = namespaces or ()
            urn = wu.Urn(_path.encode('utf-8'))
            path = self.client.get_full_path(urn)
            if path in self.info_cache:
                info = self.info_cache[path]
            else:
                response = self.client.execute_request(action='info',
                                                       path=urn.quote())
                info = wc.WebDavXmlUtils.parse_info_response(
                    content=response.content, path=path,
                    hostname=self.client.webdav.hostname)
                if info['name'] is None:
                    info['name'] = _path.split("/")[-1]
                if wc.WebDavXmlUtils.parse_is_dir_response(
                        content=response.content, path=path,
                        hostname=self.client.webdav.hostname):
                    info['isdir'] = True
                    info['files'] = []
                    # Cache every child entry so subsequent getinfo() calls
                    # on the children avoid another server round trip.
                    for i in wc.WebDavXmlUtils.parse_get_list_info_response(
                            response.content):
                        if i['path'].rstrip('/') != path.rstrip('/'):
                            self.info_cache[i['path']] = i
                            filename = wu.Urn(i['path'], i['isdir']).filename()
                            if six.PY2:
                                filename = filename.decode('utf-8')
                            filename = filename.rstrip('/')
                            info['files'].append(filename)
                self.info_cache[path] = info
            info_dict = self._create_info_dict(info)
            if info.get('isdir', False):
                info_dict['basic']['is_dir'] = True
                info_dict['details']['type'] = ResourceType.directory
        except we.RemoteResourceNotFound as exc:
            raise errors.ResourceNotFound(path, exc=exc)
        return Info(info_dict)

    def listdir(self, path):
        """Yield the names of entries in the directory at *path*."""
        info = self.getinfo(path)
        if not info.is_dir:
            raise errors.DirectoryExpected(path)
        # Child names were collected by getinfo() into the 'other' namespace.
        for name in info.raw['other']['files']:
            yield name

    def makedir(self, path, permissions=None, recreate=False):
        """Create a directory at *path* and return it opened as a sub-fs.

        *permissions* is accepted for API compatibility only. With
        *recreate*, an already-existing directory is not an error.
        """
        _path = self.validatepath(path)

        if _path in '/':
            # The root cannot be created; with recreate this is a no-op.
            if not recreate:
                raise errors.DirectoryExists(path)
        elif not (recreate and self.isdir(path)):
            if self.exists(_path):
                raise errors.DirectoryExists(path)
            try:
                self.client.mkdir(_path.encode('utf-8'))
            except we.RemoteParentNotFound as error:
                raise errors.ResourceNotFound(path, exc=error)

        return self.opendir(path)

    def openbin(self, path, mode='r', buffering=-1, **options):
        """Open the file at *path* in binary mode and return a WebDAVFile.

        Raises:
            errors.ResourceNotFound: file missing and mode does not create it.
            errors.DirectoryExpected: parent of a new file is not a directory.
            errors.FileExpected: *path* refers to a directory.
            errors.FileExists: file exists and mode is exclusive ('x').
        """
        _mode = Mode(mode)
        _mode.validate_bin()
        _path = self.validatepath(path)

        log.debug("openbin: %s, %s", path, mode)
        with self._lock:
            try:
                info = self.getinfo(_path)
                log.debug("Info: %s", info)
            except errors.ResourceNotFound:
                # File does not exist yet: only allowed for creating modes.
                if not _mode.create:
                    raise errors.ResourceNotFound(path)
                # Check the parent is an existing directory
                if self.gettype(dirname(_path)) is not ResourceType.directory:
                    raise errors.DirectoryExpected(dirname(path))
            else:
                # File exists: reject directories and exclusive-create mode.
                if info.is_dir:
                    raise errors.FileExpected(path)
                if _mode.exclusive:
                    raise errors.FileExists(path)
        return WebDAVFile(self, _path, _mode)

    def remove(self, path):
        """Delete the file at *path*; directories are rejected."""
        validated = self.validatepath(path)
        info = self.getinfo(path)
        if info.is_dir:
            raise errors.FileExpected(path)
        self.client.clean(validated.encode('utf-8'))

    def removedir(self, path):
        """Delete the empty directory at *path*.

        Raises:
            errors.RemoveRootError: when asked to delete the root.
            errors.DirectoryExpected: when *path* is not a directory.
            errors.DirectoryNotEmpty: when the directory has entries.
        """
        _path = self.validatepath(path)
        # Check the normalized path (consistent with makedir/getinfo);
        # the raw `path` argument may not be normalized to '/'.
        if _path in '/':
            raise errors.RemoveRootError()
        if not self.getinfo(path).is_dir:
            raise errors.DirectoryExpected(path)
        if not self.isempty(_path):
            raise errors.DirectoryNotEmpty(path)
        self.client.clean(_path.encode('utf-8'))

    def setbytes(self, path, contents):
        """Overwrite the file at *path* with *contents* (must be bytes)."""
        if not isinstance(contents, bytes):
            raise TypeError('contents must be bytes')
        _path = self.validatepath(path)
        source = io.BytesIO(contents)
        with self._lock:
            self._create_resource(_path.encode('utf-8')).read_from(source)

    def setinfo(self, path, info):
        """Metadata updates are not supported; only validates existence."""
        _path = self.validatepath(path)
        if self.exists(_path):
            return
        raise errors.ResourceNotFound(path)

    def copy(self, src_path, dst_path, overwrite=False):
        """Copy a file on the server from *src_path* to *dst_path*."""
        src = self.validatepath(src_path)
        dst = self.validatepath(dst_path)

        with self._lock:
            # Both precondition checks happen under the lock.
            if not self.getinfo(src).is_file:
                raise errors.FileExpected(src_path)
            if not overwrite and self.exists(dst):
                raise errors.DestinationExists(dst_path)
            try:
                self.client.copy(src.encode('utf-8'), dst.encode('utf-8'))
            except we.RemoteResourceNotFound as error:
                raise errors.ResourceNotFound(src_path, exc=error)
            except we.RemoteParentNotFound as error:
                raise errors.ResourceNotFound(dst_path, exc=error)

    def move(self, src_path, dst_path, overwrite=False):
        """Move/rename a file from *src_path* to *dst_path*."""
        _src_path = self.validatepath(src_path)
        _dst_path = self.validatepath(dst_path)

        with self._lock:
            # Run the precondition checks under the lock (as copy() does)
            # so another thread cannot change state between check and move.
            if not self.getinfo(_src_path).is_file:
                raise errors.FileExpected(src_path)
            if not overwrite and self.exists(_dst_path):
                raise errors.DestinationExists(dst_path)
            try:
                self.client.move(_src_path.encode('utf-8'),
                                 _dst_path.encode('utf-8'),
                                 overwrite=overwrite)
            except we.RemoteResourceNotFound as exc:
                raise errors.ResourceNotFound(src_path, exc=exc)
            except we.RemoteParentNotFound as exc:
                raise errors.ResourceNotFound(dst_path, exc=exc)