Example #1
import time

from diskcache import Cache

# EXPIRE, WARMUP, PROCESSES and THREADS are module-level benchmark settings
# (see the driver sketch below).


def worker(queue, eviction_policy):
    timings = {'get': [], 'set': [], 'delete': []}
    cache = Cache('tmp', eviction_policy=eviction_policy)

    for index, (action, key, value) in enumerate(iter(queue.get, None)):
        start = time.time()

        if action == 'set':
            cache.set(key, value, expire=EXPIRE)
        elif action == 'get':
            result = cache.get(key)
        else:
            assert action == 'delete'
            cache.delete(key)

        stop = time.time()

        if action == 'get' and PROCESSES == 1 and THREADS == 1 and EXPIRE is None:
            assert result == value

        if index > WARMUP:
            timings[action].append(stop - start)

    queue.put(timings)

    cache.close()
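A minimal driver sketch for the worker above (an assumption, not part of the original benchmark): it feeds (action, key, value) tuples through a multiprocessing queue, sends the None sentinel, and collects the timing dict the worker reports back on the same queue. The constants are stand-ins for the module-level settings the worker expects.

import multiprocessing as mp

EXPIRE, WARMUP, PROCESSES, THREADS = None, 100, 1, 1


def run_benchmark(operations, eviction_policy='least-recently-stored'):
    queue = mp.Queue()
    process = mp.Process(target=worker, args=(queue, eviction_policy))
    process.start()
    for operation in operations:  # each operation is an (action, key, value) tuple
        queue.put(operation)
    queue.put(None)               # sentinel: ends the worker's queue loop
    timings = queue.get()         # the worker reports its timings back
    process.join()
    return timings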
Example #2
import requests
from bs4 import BeautifulSoup
from diskcache import Cache


class URLhaus:
    """Simple client to query URLhaus by abuse.ch.
    :param query: domain, url or hash.
    :param cache_duration: Duration before refreshing the cache (in seconds).
                           Ignored if `cache_duration` is 0.
    :param cache_root: Path where to store the cached file.
    :type query: string
    :type cache_duration: int
    :type cache_root: str
    """

    def __init__(self,
                 query,
                 cache_duration=300,
                 cache_root="/tmp/cortex/URLhaus"):
        self.URL = "https://urlhaus.abuse.ch/browse.php"
        self.query = query
        self.cache = None
        if cache_duration > 0:
            self.cache = Cache(cache_root)
            self.cache_duration = cache_duration

    def _get_raw_data(self):
        try:
            return self.cache[self.query.encode('utf-8')]
        except (AttributeError, TypeError):
            # Caching is disabled (self.cache is None): fetch directly.
            return self.fetch()
        except KeyError:
            # Cache miss: fetch, store with the configured TTL, then read back.
            self.cache.set(
                self.query.encode('utf-8'),
                self.fetch(),
                expire=self.cache_duration)
            return self.cache[self.query.encode('utf-8')]

    def search(self):
        res = self._get_raw_data()
        return self.parse(res)

    def fetch(self):
        payload = {"search": self.query}
        return requests.get(self.URL, params=payload).text

    def parse(self, doc):
        results = []
        soup = BeautifulSoup(doc, "html.parser")
        table = soup.find("table", class_="table")
        rows = table.find_all("tr")[1:]
        for row in rows:
            cols = row.find_all("td")
            results.append({
                "dateadded": cols[0].text,
                "malware_url": cols[1].text,
                "link": cols[1].find("a").attrs.get("href"),
                "status": cols[2].text,
                "tags": cols[3].text.split(),
                "gsb": cols[4].text,
                "reporter": cols[5].text
            })
        return results
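A short usage sketch for the client above (assuming requests, bs4 and diskcache are installed); the query and printed fields are illustrative:

client = URLhaus('example.com', cache_duration=300)
for entry in client.search():
    print(entry['dateadded'], entry['malware_url'], entry['status'])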
Example #3
def _set_cached(path, content):
    # 1/ memory cache
    JUMBO_FIELDS_MEMORY_CACHE[path] = content

    # 2/ disk cache
    if SIMPLEFLOW_ENABLE_DISK_CACHE:
        try:
            cache = Cache(constants.CACHE_DIR)
            cache_key = "jumbo_fields/" + path.split("/")[-1]
            logger.debug("diskcache: setting key={} on cache_dir={}".format(cache_key, constants.CACHE_DIR))
            cache.set(cache_key, content, expire=3 * constants.HOUR)
        except OperationalError:
            logger.warning("diskcache: got an OperationalError on write, skipping cache write")
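The matching read path is not part of the excerpt; a sketch of what it might look like, checking the in-memory dict before falling back to the disk cache, under the same key scheme and constants as above:

def _get_cached(path):
    # 1/ memory cache
    if path in JUMBO_FIELDS_MEMORY_CACHE:
        return JUMBO_FIELDS_MEMORY_CACHE[path]

    # 2/ disk cache
    if SIMPLEFLOW_ENABLE_DISK_CACHE:
        try:
            cache = Cache(constants.CACHE_DIR)
            return cache.get("jumbo_fields/" + path.split("/")[-1])
        except OperationalError:
            logger.warning("diskcache: got an OperationalError on read, skipping cache read")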
Example #5
class Cache(object):
    def __init__(self):
        from diskcache import Cache
        self.cache = Cache('/tmp/navan')
        self.cache.stats(enable=True)

    def get(self, *args):
        return self.cache.get(':'.join(args))

    def set(self, *args, **kwargs):
        expire = kwargs.get('expire')
        if len(args) < 2:
            raise Exception('cache set must contain `key` and `value`')
        key, value = args[:-1], args[-1]
        key = ':'.join(key)
        return self.cache.set(key, value, expire)

    def get_json(self, *args):
        ret = self.get(*args)
        if not ret:
            return ret
        return json.loads(ret)

    def set_json(self, *args, **kwargs):
        args = list(args)
        args[-1] = json.dumps(args[-1])
        return self.set(*args, **kwargs)
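A usage sketch for the wrapper above (assumed): all positional arguments except the last are joined with ':' into the key, so structured keys read naturally.

cache = Cache()
cache.set_json('user', '42', {'name': 'Ada'}, expire=60)  # stored under key 'user:42'
print(cache.get_json('user', '42'))                       # {'name': 'Ada'}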
Example #6
class DiscCacheWrapper:

    DISC_CACHE_PATH = "./tmp/flickr-auth-disc-cache"

    def __init__(self):
        self.disc_cache = Cache(directory=DiscCacheWrapper.DISC_CACHE_PATH)

    def get(self, key):
        # This can raise a diskcache.Timeout error if it fails to talk to its database
        return self.disc_cache.get(key=key, default=None, retry=False)

    def set(self, key, value, timeout=None):
        # This can raise a diskcache.Timeout error if it fails to talk to its database
        self.disc_cache.set(key=key, value=value, expire=timeout, retry=False)

    def delete(self, key):
        self.disc_cache.delete(key=key)
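Because both get() and set() above pass retry=False, they can raise diskcache.Timeout when the underlying SQLite database is busy; a sketch of guarding for that:

from diskcache import Timeout

wrapper = DiscCacheWrapper()
try:
    wrapper.set('token', 'abc123', timeout=300)
    value = wrapper.get('token')
except Timeout:
    value = None  # treat a busy cache database as a miss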
Example #7
class Cache(object):
    def __init__(self):
        try:
            self.cache = DC('./tmp')
        except Exception as ex:
            print('Got an exception with diskcache open: {}'.format(ex))
            self.cache = None

    def __del__(self):
        try:
            self.cache.close()
        except Exception as ex:
            print('Got an exception with diskcache close: {}'.format(ex))

    def set(self, key, value):
        if self.cache is not None:
            self.cache.set(key, BytesIO(value), read=True, tag=u'data')

    def get(self, key):
        if self.cache is not None:
            # With tag=True diskcache returns a (value, tag) pair; on a miss
            # the value half is the default (None).
            value, tag = self.cache.get(key, default=None, read=True, tag=True)
            if value is not None:
                return value, tag
        return None

    def pop(self, key):
        if self.cache is not None:
            # Cache.pop() takes no `read` argument, unlike Cache.get().
            value, tag = self.cache.pop(key, default=None, tag=True)
            if value is not None:
                return value, tag
        return None

    def delete(self, key):
        if self.cache is not None:
            self.cache.delete(key)

    def create_index(self):
        if self.cache is not None:
            self.cache.create_tag_index()
            return self.cache.tag_index
        return None

    def clear_all(self):
        if self.cache is not None:
            self.cache.clear()
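A usage sketch for the class above, assuming DC is an alias for diskcache.Cache: values come back as (file handle, tag) pairs because of read=True, and since everything is tagged u'data', the tag index allows evicting it all at once (Cache.evict is part of diskcache):

c = Cache()
c.set('k1', b'payload')
hit = c.get('k1')
if hit is not None:
    handle, tag = hit         # read=True yields an open file handle plus the tag
    data = handle.read()      # b'payload'
print(c.create_index())       # enable and return the tag index
c.cache.evict('data')         # drop every entry tagged u'data'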
Example #8
def get_ingress_status(domain, code):
    cache = Cache('/tmp/check_k8s_status.cache')
    data_cache = cache.get(domain, default=b'', read=True, expire_time=False)
    if domain in cache:
        # print(data_cache)
        if code in data_cache:
            print(data_cache[code])
        else:
            print(0.0)
        exit(0)

    data = get_status_code_ratio(domain)
    expire_time = es_query_duration / 1000
    cache.set(domain, data, expire=expire_time)
    if code in data:
        print(data[code])
    else:
        print(0.0)
Example #9
class DiskCache():
    def __init__(self, cache_dir, ttl=None):
        self.ttl = ttl
        self.cache = Cache(cache_dir, eviction_policy='least-recently-used')

    def __getitem__(self, key):
        return self.cache[key]

    def __setitem__(self, key, value):
        return self.cache.set(key, value, expire=self.ttl)

    def get(self, key, default=None):
        return self.cache.get(key, default=default)

    def set(self, key, value):
        return self.cache.set(key, value, expire=self.ttl)

    def clear(self):
        self.cache.clear()
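A quick usage sketch (assumed): the ttl given at construction applies to every write, whether through item assignment or set().

cache = DiskCache('/tmp/mycache', ttl=3600)
cache['greeting'] = 'hello'           # expires in an hour
print(cache.get('greeting'))          # 'hello'
print(cache.get('missing', 'n/a'))    # 'n/a'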
Example #10
class CachedClient(Client):
    def __init__(self, apikey, agent="unknown", host=None, cache_dir=None):
        super().__init__(apikey, agent=agent, host=host)
        self.cache = Cache(cache_dir,
                           disk=VtCache,
                           disk_compress_level=6,
                           tag_index=True)
        self.cache_dir = cache_dir
        self.logger = logging.getLogger('kfinny.cachedvt.CachedClient')

    def _get(self, resource):
        data, tag = self.cache.get(resource, tag=True)
        if data and tag in ['sha1', 'md5']:
            data, tag = self.cache.get(data, tag=True)
        if data and tag == 'object':
            data = Object.from_dict(data)
        return data, tag

    def _put_object(self, obj):
        self.cache.set(obj.sha256, obj.to_dict(), tag='object')
        self.cache.set(obj.sha1, obj.sha256, tag='sha1')
        self.cache.set(obj.md5, obj.sha256, tag='md5')

    def _put_error(self, resource, error):
        self.cache.set(resource, {
            'resource': resource,
            'code': error.code,
            'message': error.message
        },
                       tag='error')

    def yield_file_report(self, resource, include_notfound=False):
        queryset = set()
        if isinstance(resource, str):
            resource = resource.split(',')
        if isinstance(resource, (tuple, list, set, frozenset)):
            for r in resource:
                data, tag = self._get(r)
                if data is not None:
                    if tag == 'object' or include_notfound:
                        yield data
                else:
                    queryset.add(r)
        resource = sorted(queryset)
        for i in resource:
            try:
                obj = self.get_object(f'/files/{i}')
                self._put_object(obj)
                yield obj
            except APIError as e:
                self._put_error(i, e)
        self.logger.debug("hits = {}, misses = {}".format(*self.cache.stats()))
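A sketch of driving the client above (assuming the vt-py package, which provides Client, Object and APIError, plus the VtCache disk class referenced in __init__): hashes that miss the cache are fetched from the API and stored, so a second call is served locally. The API key and hash are placeholders.

client = CachedClient(apikey='<api-key>', cache_dir='/tmp/vt-cache')
hashes = ['<sha256-of-a-file>']
for report in client.yield_file_report(hashes):
    print(report.sha256)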
Example #11
class BaseCacheAnalyzer(BaseAnalyzer):
    def __init__(self,
                 cache_location=None,
                 force=False,
                 delete_cache_when_done=False,
                 **kwargs):
        super().__init__(**kwargs)
        self.cache_location = cache_location
        self.cache = None
        self.force = force
        self.delete_cache_when_done = delete_cache_when_done

    def initialize(self):
        from diskcache import Cache
        self.cache = Cache(self.cache_location or self.uid + "_cache")

    def filter(self, simulation):
        return self.force or not self.is_in_cache(simulation.id)

    def to_cache(self, key, data):
        self.cache.set(key, data)

    def from_cache(self, key):
        return self.cache.get(key)

    def is_in_cache(self, key):
        return key in self.cache

    def destroy(self):
        if self.cache:
            self.cache.close()

        if self.cache and self.delete_cache_when_done and os.path.exists(
                self.cache.directory):
            cache_directory = self.cache.directory
            del self.cache
            shutil.rmtree(cache_directory)

    @property
    def keys(self):
        return list(self.cache.iterkeys()) if self.cache else None
Example #12
def main(source, clear_cache):

    comi_dir_path = '/tmp/comi'
    comi_dir_path_object = Path(comi_dir_path)
    comi_dir_path_object.mkdir(exist_ok=True)

    cache_dir_path = '/tmp/comi/cache'
    cache = Cache(cache_dir_path)

    if clear_cache:
        cache.clear()
        print('Cache cleared')
        return True

    if source:
        if 'github.com' in source and 'blob' in source:
            url = source.replace('blob', 'raw')
        else:
            url = source
    else:
        url = 'https://github.com/commmands/commands/raw/master/commands_1.commands'

    cache_value = cache.get(url)

    temp_commands_path = '/tmp/comi/temp_commands'
    temp_commands_path_object = Path(temp_commands_path)
    temp_commands_path_object.parent.mkdir(parents=True, exist_ok=True)

    if cache_value:
        temp_commands_path_object.write_text(cache_value)
    else:
        commands_response = requests.get(url)
        commands = commands_response.text
        temp_commands_path_object.write_text(commands)
        cache.set(url, commands)

    perl_part = "perl -e 'ioctl STDOUT, 0x5412, $_ for split //, do{ chomp($_ = <>); $_ }'"
    command = f"cat {temp_commands_path} | fzf --tac | {perl_part} ; echo"

    subprocess.call(command, shell=True)
Example #13
class KeyValueDB(Generic[NativeType, StorageType], abc.ABC):
    """Interface for concrete DB backend."""

    _native_type: NativeType
    _storage_type: StorageType

    def __init__(self, database_dir: Path):
        if not database_dir.exists():
            database_dir.mkdir(mode=0o750, parents=True)
        self._cache = Cache(str(database_dir))

    def __contains__(self, key: Any) -> bool:
        return self._cache.__contains__(key)

    def __delitem__(self, key: Any) -> bool:
        return self._cache.__delitem__(key)

    def __getitem__(self, key: Any) -> NativeType:
        return self._storage_to_native_type(self._cache.__getitem__(key))

    def __setitem__(self, key: Any, value: NativeType) -> None:
        return self._cache.__setitem__(key, self._native_to_storage_type(value))

    def _native_to_storage_type(self, value: NativeType) -> StorageType:
        if self._native_type is self._storage_type or self._storage_type is None:
            return cast(StorageType, value)
        else:
            return self._storage_type(value)

    def _storage_to_native_type(self, value: StorageType) -> NativeType:
        if self._native_type is self._storage_type or self._native_type is None:
            return cast(NativeType, value)
        else:
            return self._native_type(value)

    def close(self, *args, **kwargs) -> None:
        return self._cache.close()

    def get(self, key: Any, default: Any = None, *args, **kwargs) -> Union[Any, NativeType]:
        value = self._cache.get(key, default, *args, **kwargs)
        if value is default:
            return default
        else:
            return self._storage_to_native_type(value)

    def set(self, key: Any, value: NativeType, *args, **kwargs) -> bool:
        return self._cache.set(key, self._native_to_storage_type(value), *args, **kwargs)

    def touch(self, *args, **kwargs) -> bool:
        return self._cache.touch(*args, **kwargs)
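The generic class above converts between a native type and a storage type on every read and write; a minimal concrete subclass sketch (assumed, storing IPv4 addresses as strings):

from ipaddress import IPv4Address
from pathlib import Path

class IPv4DB(KeyValueDB[IPv4Address, str]):
    _native_type = IPv4Address
    _storage_type = str

db = IPv4DB(Path('/tmp/ipdb'))
db['gateway'] = IPv4Address('192.168.0.1')
print(db['gateway'])  # IPv4Address('192.168.0.1'), rebuilt from the stored str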
Example #14
class CacheWrapper:
    def __init__(self, path, size):
        self._cache = Cache(path, size_limit=size)

    def update_cache_entries(self, entries: List[KVPair]):
        """
        Update cache entries
        :param entries: a list of pairs key-value
        :return:
        """
        for entry in entries:
            self._cache.set(entry.key, entry.value)  # ALWAYS override!

    def set_entry(self, entry: KVPair):
        self._cache.set(entry.key, entry.value)

    def get_cached_entries(self, keys: List[Any]) -> Tuple[List[Any], List[Any]]:
        """
        Retrieve cached entries
        :param keys: a list of keys to retrieve
        :return: a tuple (<cached results>, <missing entries>)
        """
        to_compute = []
        cached_entries = []

        for key in keys:
            entry = self._cache.get(key)
            if entry is None:
                to_compute.append(key)
            else:
                cached_entries.append(entry)

        return cached_entries, to_compute

    def get_cached_entry(self, key: Any) -> Any:
        return self._cache.get(key)
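A usage sketch; KVPair is not defined in the excerpt, so a namedtuple stand-in is assumed here:

from collections import namedtuple

KVPair = namedtuple('KVPair', 'key value')

cache = CacheWrapper('/tmp/kvcache', size=10 ** 8)
cache.update_cache_entries([KVPair('a', 1), KVPair('b', 2)])
cached, missing = cache.get_cached_entries(['a', 'b', 'c'])
print(cached)   # [1, 2]
print(missing)  # ['c']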
Example #15
class DiskCachePersistence:
    def __init__(self, dbname, dbpassphrase):
        self.dbname = dbname
        self.dbpassphrase = dbpassphrase
        self.db = Cache(dbname)
        # TODO: create encrypted Cache with kdf dbpassphrase
        # TODO: purge expired skippedMessageKey based on store_time

    def save_conversation(self, conversation):
        # Cache.set() needs the value itself; tag the entry for later lookups.
        return self.db.set(b'conv:' + conversation.ks['CONVid'], conversation, tag='conv', retry=True)

    def load_conversation(self, conv_id):
        return self.db.get(b'conv:' + conv_id, None, retry=True)

    def delete_conversation(self, conversation):
        return self.db.pop(b'conv:' + conversation.ks['CONVid'], None, retry=True)

    def get_other_names(self, name):
        names = []
        for k in self.db:
            if k.startswith(b'conv:'):
                names.append(self.db[k].other_name)
        return names
Example #16
class CacheManager(object):
    def __init__(self, cache_path=CACHE_PATH):
        self.cache = Cache(cache_path)

    def items(self):
        return self.cache.iterkeys()

    def has_key(self, key):
        return key in self.cache

    def set(self, key, value, ttl=TTL):
        return self.cache.set(key=key, value=value, expire=ttl)

    def get(self, key):
        return self.cache.get(key=key)

    def clear_cache(self):
        for key in self.cache:
            if key != 'censys_credentials':
                del self.cache[key]
        return True

    def close(self):
        self.cache.close()
Example #17
class DiskPubSubCache(object):
    """A DiskCache-backed cache used for PubSub channels

    Attributes:
        cache (Cache): The cache which backs this pubsub cache
        _subscribers (dict{str: DiskSubscription}): The subscriptions tracked by this cache
        _threads_registered (set(str)): The names of the threads which have registered triggers on the database
        _push_partial (func): The function called when an insert or update happens on the cache

    Args:
        directory (str): The path to the directory used by this cache
        timeout (float, optional): The number of seconds to wait before an operation times out. Defaults to 0.01 seconds
    """
    _insert_func_name = 'push_on_insert'
    _update_func_name = 'push_on_update'

    def __init__(self, directory, timeout=0.01):
        self.cache = Cache(directory, timeout=timeout)
        # Would be nice to use a weakref to a set so that keys with no
        # subscribers are dropped automatically.
        self._subscribers = {}
        self._threads_registered = set()
        self._insert_triggers()
        self._push_partial = partial(self.__push_to_subscribers)

    def publish(self, channel, data):
        """Publish data to a channel

        Args:
            channel (str): Channel to publish the data to
            data: The data to publish. It arrives at subscribers in the same format as it was set

        Returns:
            (int): The number of subscribers which received the published data
        """
        self.cache.set(channel, data)
        return len(self._subscribers.get(channel, []))

    def register_callbacks(self):
        """Registers the trigger functions for the current thread.

        A thread must have trigger functions registered before it can publish data
        """
        if threading.current_thread().name not in self._threads_registered:
            con = self._con
            for func_name in (self._insert_func_name, self._update_func_name):
                con.create_function(func_name, 2, self._push_partial)
            self._threads_registered.add(threading.current_thread().name)

    def _insert_triggers(self):
        """Inserts the original triggers into the cache, but does not create or the functions which receive the triggers
        """
        con = self._con
        for func_name, operation in [(self._insert_func_name, 'INSERT'), (self._update_func_name, 'UPDATE')]:
            con.execute('CREATE TRIGGER IF NOT EXISTS {0} AFTER {1} ON Cache BEGIN '
                        'SELECT {0}(NEW.key, NEW.value); END;'.format(func_name, operation))

    def subscribe(self, channel):
        """Subscribe to a channel

        Args:
            channel (str): The name of the channel to subscribe to

        Returns:
            (DiskSubscription): The subscription to this channel
        """
        subscription = DiskSubscription(channel)
        if channel not in self._subscribers:
            self._subscribers[channel] = WeakSet([subscription])
        else:
            self._subscribers[channel].add(subscription)
        return subscription

    def __push_to_subscribers(self, channel, value):
        try:
            value = self.__get_value(value)
            for subscriber in self._subscribers.get(str(channel), []):
                subscriber.push(value)
        except Exception:
            import traceback
            traceback.print_exc()
            raise

    @staticmethod
    def __get_value(value):
        if value == unsubscribe_message or isinstance(value, string_types) or isinstance(value, int) or isinstance(
                value, float):
            return value
        if isinstance(value, binary_type):
            return value.decode('utf-8')
        try:
            return pickle.load(BytesIO(value))
        except (KeyError, TypeError, IndexError):
            return str(value)

    @property
    def _con(self):
        con = getattr(self.cache._local, 'con', None)

        if con is None:
            con = self.cache._local.con = sqlite3.connect(
                os.path.join(self.cache._directory, DBNAME),
                timeout=self.cache._timeout,
                isolation_level=None,
            )

            # Some SQLite pragmas work on a per-connection basis so query the
            # Settings table and reset the pragmas. The Settings table may not
            # exist so catch and ignore the OperationalError that may occur.

            try:
                select = 'SELECT key, value FROM Settings'
                settings = con.execute(select).fetchall()
            except sqlite3.OperationalError:
                pass
            else:
                for key, value in settings:
                    if key.startswith('sqlite_'):
                        self.cache.reset(key, value, update=False)

        return con

    def shutdown(self):
        """Shuts down the connection to the cache
        """
        self.cache.close()
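A sketch of the publish/subscribe flow above (illustrative only: the DiskSubscription class is referenced but not shown in this excerpt). Triggers fire on the connection that writes, so the publishing thread must call register_callbacks() first:

pubsub = DiskPubSubCache('/tmp/pubsub')
pubsub.register_callbacks()                # register trigger functions for this thread
subscription = pubsub.subscribe('events')
count = pubsub.publish('events', 'hello')  # pushes 'hello' to each subscriber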
Example #18
async def set_async(key, val):
    loop = asyncio.get_running_loop()
    future = loop.run_in_executor(None, cache.set, key, val)
    result = await future
    return result


async def get_async(key):
    loop = asyncio.get_running_loop()
    future = loop.run_in_executor(None, cache.get, key)
    result = await future
    return result


##
# Utility Constants
##
cache.set("running", True)
app.last_request = time.time()
app.image_server = "https://s2.mangadex.org"
app.api_server = "https://api.mangadex.network"


##
# Sanic Utility Functions
##
@app.middleware('request')
async def add_start_time(request):
    request.ctx.start_time = time.time()


@app.middleware('response')
async def add_spent_time(request, response):
Example #19
class Webserver:
    def __init__(
        self,
        host="localhost",
        port=8080,
        hostname='hostname',
        workers=os.cpu_count() - 1,
        max_connections=socket.SOMAXCONN,
        max_cache_size=4e9,
        proto='http',
    ):
        self._max_connections = max_connections
        self.host = host
        self.port = port
        self.address = f'{proto}://{self.host}:{self.port}'
        self.hostname = hostname
        self.max_workers = workers
        self.routes = {}
        self.vservers_routes = {}  # routes for virtual servers
        self.regular_routes = {}
        self._response = None
        self._current_clients = []
        self.data_end = b'\r\n\r\n'
        self.connect_refresh_timeout = 10
        self.client_request_timeout = 0.1
        self._max_keep_alive = timedelta(seconds=10)
        self.cache = Cache(size_limit=int(max_cache_size))
        self.cache_expire = 100
        self._running = True

    def set_routes(self, routes):
        self.routes = {**routes, **self.routes}

    def get_routes(self):
        return self.routes

    def handle_file(self,
                    request,
                    filename,
                    root=os.getcwd(),
                    content_type=None):
        path = os.path.join(root, filename)

        # Try to serve the static file from the cache
        res = self.cache.get(path)
        if res:
            return res

        if not os.path.exists(path):
            self.cache.set(path,
                           Errors.NOT_FOUND_PAGE,
                           expire=self.cache_expire,
                           tag='data')
            self.cache.cull()
            return Errors.NOT_FOUND_PAGE

        if content_type is None:
            content_type, _ = mimetypes.guess_type(path)
            logger.verbose('Content type for %s, was set automatically to %s',
                           path, content_type)
        response = Response.response_file(request, path, content_type)

        if response.status == 200:
            # If the status is 200 OK, add the response to the cache
            self.cache.set(path,
                           response,
                           expire=self.cache_expire,
                           tag='data')
            self.cache.cull()
        return response

    def handle_dir(self, request, dirname=os.getcwd()):
        path = os.path.abspath(dirname)
        if not os.path.exists(path):
            return Errors.NOT_FOUND_PAGE
        response = Response.response_dir(request, path)

        return response

    def get_routes_for_virtual_server(self, request):
        vserver_name = request.headers['Host']
        routes = self.vservers_routes.get(vserver_name)
        if routes:
            logger.debug('Found routes for vserver %s', vserver_name)
            return routes
        return self.regular_routes

    def find_handler(self, request: Request):
        logger.debug('%s', self.regular_routes)
        try:
            response = Errors.NOT_FOUND_PAGE
            for path_info, handler in self.get_routes_for_virtual_server(
                    request).items():
                logger.debug('checking %s %s | %s', path_info.path,
                             path_info.method, handler)
                path_info: PathInfo = path_info
                reg = path_info.path
                match = re.fullmatch(reg, request.url.path)
                if match and request.method == path_info.method:
                    logger.debug('handler found %s for %s', handler,
                                 request.url)
                    if len(match.groupdict().items()) == 0:
                        response = handler(request)
                    else:
                        response = handler(request, match.group(1))
                    break

            logger.debug('handler returned %s', response)
            return response

        except Exception:
            logger.exception("Internal Error")
            return Errors.INTERNAL_SERVER_ERROR

    def _read_data(self, client, address):
        _keep_alive_deadline = datetime.now() + self._max_keep_alive
        request = Request()
        filled = False
        while not filled:
            try:
                line = client.recv(64 * 1024)
                if not line:
                    raise ReceiveDone('Client done')

                split = Request.split_keep_sep(line,
                                               bytes(os.linesep, 'utf-8'))
                for s in split:
                    if request.dynamic_fill(s):
                        filled = True

            except BlockingIOError:
                # nothing arrived yet; wait a bit, more data may come
                time.sleep(0.5)
                _current_time = datetime.now()
                if _current_time > _keep_alive_deadline:
                    # waited longer than the keep-alive window allows
                    raise ConnectionError('Client dead')

        return request

    def _start_session(self, client, address):
        while True:
            request = self._read_data(client, address)

            request.print_headers()
            request.print_body()

            response = self.find_handler(request)
            logger.access(method=request.method,
                          url=request.target,
                          protocol=request.version,
                          status=response.status)

            Response.response(client, response)

            # finished handling the client's request
            # (received it, responded, got here)
            if request.headers.get('Connection') == 'keep-alive':
                logger.verbose("Client %s asked for keep-alive connection",
                               address)

                # the browser sent keep-alive (which it almost always does
                # to avoid constantly re-opening connections and to speed
                # up its exchange with the server)
                client.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                # so keep looping in while True:
            else:
                # otherwise leave the while True loop (the client is done
                # talking to us)
                break

    def handle(self, client, address):
        init_thread_local(THREAD_LOCAL_DATA)
        THREAD_LOCAL_DATA.client = address
        with client:
            try:
                self._start_session(client, address)
            except ConnectionError:
                logger.verbose('Client was not alive for %s seconds',
                               self._max_keep_alive)
            except OSError:
                logger.exception('Client had an issue')
            except ReceiveDone:
                logger.verbose('Client received the content')
            except Exception:
                logger.exception('Client had an error')
                Response.response(client, Errors.INTERNAL_SERVER_ERROR)

        logger.verbose("Disconnected")

    def run(self, stop: Event):
        self._running = True
        server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.make_regular_routes()

        with server_socket:
            server_socket.bind((self.host, self.port))
            server_socket.listen(self._max_connections)
            server_socket.setblocking(False)
            logger.verbose(f'Start server on {self.address}')
            logger.debug('T %s', server_socket.gettimeout())
            with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
                logger.verbose('ThreadPoolExecutor started')
                while not stop.is_set():
                    try:
                        self._check_is_running()
                        try:
                            client, address = server_socket.accept()
                            THREAD_LOCAL_DATA.client = address
                            logger.verbose('Got client')

                            future = executor.submit(self.handle, client,
                                                     address)
                            self._current_clients.append(future)
                        except (socket.timeout, BlockingIOError):
                            # no one connected; we can wait
                            # logger.info(f'No client')
                            time.sleep(1)
                            pass
                    except KeyboardInterrupt:
                        logger.verbose('Server shutdown started')
                        break
                logger.verbose(('ThreadPoolExecutor '
                                'waiting for alive connections'))
                for future in self._current_clients:
                    result = future.result()

            logger.verbose('ThreadPoolExecutor finished')

    def _check_is_running(self):
        if not self._running:
            raise KeyboardInterrupt

    def stop(self):
        self._running = False

    def route(self, path, method='GET', vserver='localhost'):
        logger.debug('Adding %s [%s]', path, method)

        if path in self.routes:
            raise AssertionError("Such route already exists.")

        def wrapper(handler):
            path_info = PathInfo(path, method)
            if vserver == 'localhost':
                self.set_routes({path_info: handler})
            if vserver in self.vservers_routes:
                self.vservers_routes[vserver][path_info] = handler
                logger.debug('Add route %s for vserver %s', path_info, vserver)
            else:
                self.vservers_routes[vserver] = {path_info: handler}
                logger.debug('INIT route %s for vserver %s', path_info,
                             vserver)
            return handler

        return wrapper

    def make_regular_routes(self):
        logger.debug('Processing routes %s', self.routes)
        for path_info, handler in self.routes.items():
            reg = re.compile(path_info.path)
            path_info.path = reg
            self.regular_routes[path_info] = handler
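A sketch of wiring a route with the decorator above; Response, Errors and the rest of the support code come from the surrounding project, so the handler body is illustrative only:

from threading import Event

server = Webserver(host='localhost', port=8080)

@server.route('/index.html', method='GET')
def index(request):
    # serve (and disk-cache) a static file via the handler above
    return server.handle_file(request, 'index.html')

server.run(stop=Event())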
Example #20
class PandaXT:
    """A "ccxt" exchanges wrapper class over Pandas lib."""

    def __init__(self, exchange, api_key=None, secret=None, user_agent=None, clear_cache=False) -> None:
        """Class constructor.

        >>> markets = PandaXT('binance').markets
        >>> isinstance(markets, Markets)
        True
        >>> market = markets.get('BTC/USDT')  # type: Market
        >>> isinstance(market, Market)
        True
        >>> isinstance(market.precision.price, int)
        True

        :param Text exchange: a ccxt lib supported exchange
        :param Text api_key: API key
        :param Text secret: API secret
        :param U[Bool, Text] user_agent: if True, use a default ccxt user agent; if a string, use it as the user agent.
        :param Bool clear_cache: if True, the cached market data will be cleared and rebuilt.
        """
        # for cache purposes
        self._basemarkets = None
        self._currencies = None
        self._symbols = None
        self._markets = None

        # The disk cache is created lazily in _load_markets, so only remember
        # whether it should be cleared there.
        self._clear_cache = clear_cache

        exchange = _get_version(exchange)
        # assert exchange is not None, f'"{exchange}" not supported'
        api = getattr(ccxt, exchange)
        settings = dict(timeout=25000, enableRateLimit=True, fetchTickersErrors=False)

        if user_agent:
            if isinstance(user_agent, bool):
                settings.update(userAgent=api.userAgents[1])
            else:
                settings.update(userAgent=str(user_agent))

        if not all((api_key, secret)):
            load_dotenv()
            field_name = exchange.upper().strip(' _012345')
            key_field, secret_field = f'{field_name}_KEY', f'{field_name}_SECRET'
            api_key = os.environ.get(key_field)
            secret = os.environ.get(secret_field)

        if api_key and secret:
            settings.update(apiKey=api_key, secret=secret)

        if exchange == 'binance':
            # noinspection PyUnresolvedReferences
            settings.update(adjustForTimeDifference=True, parseOrderToPrecision=True)

        self._api = api(settings)

    def _load_markets(self) -> Markets:
        """Markets metadata cache handler.

        >>> isinstance(PandaXT('binance')._load_markets(), Markets)
        True

        :return: markets metadata as "Markets" instance.
        """
        if self._markets is not None:
            data = self._markets
        else:
            self._cache_dir = _DATA_DIR / 'pandaxt' / self.id
            self._cache_dir.mkdir(exist_ok=True, parents=True)
            self._cache = Cache(str(self._cache_dir))
            if self._clear_cache:
                self._cache.clear()
            data = self._cache.get('markets', dict())
            if len(data) == 0:
                data = self._api.load_markets()
                data = {k: {x: y for x, y in v.items() if y}
                        for k, v in data.items()
                        if v}
                self._cache.set('markets', data, expire=(60.0 ** 2.0) * 6.0)  # six hours
        return Markets(**data)

    @property
    def id(self) -> Text:
        """Exchange unique reference (also know as ID).

        >>> PandaXT('binance').id
        'binance'

        :return: exchange unique reference.
        """
        return self._api.id

    @property
    def timeframes(self) -> List[Text]:
        """Return valid exchange timeframes as list.

        >>> '15m' in PandaXT('binance').timeframes
        True

        :return: valid exchange timeframes.
        """
        items = self._api.timeframes.items()
        od = OrderedDict(sorted(items, key=lambda x: x[1]))
        return list(od.keys())

    @property
    def delisted(self) -> Symbols:
        """Returns delisted symbols (active -> False)

        >>> binance = PandaXT('binance')
        >>> delisted = binance.delisted
        >>> isinstance(delisted, list)
        True

        :return: return delisted symbols
        """
        return Symbols(*[m
                         for m in self.markets
                         if hasattr(m, 'active') and not m.active])

    @property
    def name(self) -> Text:
        """Exchange long name.

        :return: exchange long name.
        """
        return getattr(self._api, 'name')

    @property
    def symbols(self) -> Symbols:
        """Get all supported symbols by exchange as "Symbol" list."""
        if self._symbols is None:
            self._symbols = sorted([m for m in self.markets if m not in self.delisted])
        return Symbols(self._symbols)

    @property
    def base_markets(self) -> Currencies:
        """Get exchange base markets currencies as "Currency" list."""
        if self._basemarkets is None:
            self._basemarkets = sorted(list({s.quote for s in self.symbols}))
        return Currencies(self._basemarkets)

    @property
    def currencies(self) -> Currencies:
        """Get supported currencies by exchange as "Currency" list.

        >>> exchange = PandaXT('binance')
        >>> len(exchange.currencies) > 0
        True

        :return: all active currencies supported by exchange.
        """
        # Initialize markets, symbols, currencies and basemarkets
        if self._currencies is None:
            self._currencies = [s.base for s in self.symbols]
            self._currencies = self._currencies + list(self.base_markets)
            self._currencies = sorted(list(set(self._currencies)))

        return Currencies(self._currencies)

    def get_market_precision(self, symbol, precision_type=None) -> Int:
        """Get precision set by exchange for a market.

        >>> PandaXT('binance').get_market_precision('MATIC/USDT')

        :param symbol: symbol of the market whose precision will be returned.
        :param precision_type: accepted types: "price", "amount", "cost", "base", "quote" (default "price")
        :return: precision length for the supplied market symbol.
        """
        market: Market = self.markets.get(Symbol(symbol))
        precision = market.precision
        return getattr(precision, precision_type or 'price')

    @property
    def markets(self) -> Markets:
        """Get all exchange markets metadata.

        >>> exchange = PandaXT('binance')
        >>> markets = exchange.markets
        >>> isinstance(markets, Markets) and len(markets) > 1
        True

        :return Dict: all exchange markets metadata.
        """
        if self._markets is None:
            self._markets = self._load_markets()
        return self._markets

    def parse_timeframe(self, timeframe) -> U[Int, None]:
        """Get amount of seconds for a supplied string formatted timeframe.

        :param timeframe: a supported and valid string formatted timeframe (example: "15m")
        :return: None if the timeframe is not supported; otherwise, the number of seconds it spans.
        """
        if hasattr(self._api, 'parse_timeframe'):
            return self._api.parse_timeframe(timeframe)

    def get_timeframe(self, timeframe) -> Text:
        """Timeframe sanitizer.

        >>> PandaXT('binance').get_timeframe(15)
        '15m'

        :param timeframe: timeframe to sanitize.
        :type timeframe: Text or Int
        :return str: sanitized timeframe.
        """
        timeframe = str(timeframe)
        if timeframe.isdigit() or not timeframe[-1].isalpha():
            timeframe = f'{timeframe}m'
        if timeframe not in self.timeframes:
            raise TimeframeError(timeframe, exchange=self.name)
        else:
            return timeframe

    # @check_symbol
    def get_ohlc(self, symbol, timeframe='15m', limit=25) -> pd.DataFrame:
        """Get OHLC data for specific symbol as pandas DataFrame type.

        :param Text symbol: symbol name use at ohlc data request.
        :param Text timeframe: an exchange supported timeframe.
        :param int limit: max rows limit.
        :return pd.DataFrame: data-frame with: open, high, low, close, volume, qvolume columns and 'date' as index.
        """

        if Symbol(symbol) not in self.symbols:
            # print(symbol, symbol in self.symbols, len(self.symbols))
            raise SymbolError(symbol, exchange=self.name)

        data = self._api.fetch_ohlcv(symbol, timeframe=timeframe, limit=limit)
        df = pd.DataFrame(data, columns=_OHLC_FIELDS)
        df['qvolume'] = df['volume'] * df['close']
        df.index = pd.to_datetime(df.pop('date') // 1000, unit='s')
        df.index.name = 'date'
        return df

    def get_currency(self, currency) -> Currency:
        """Currency name sanitizer."""
        if currency:
            if currency in self._api.commonCurrencies:
                currency = self._api.commonCurrencies.get(currency)
            if currency not in self.currencies:
                log.debug(f'Currency {str(currency)} is not supported by exchange.')
            return Currency(currency)

    def altname(self, name) -> U[Currency, Symbol]:
        """Retrieve alternative currency or symbol name used in a specific exchange.

        >>> PandaXT('binance').altname('YOYOW')
        'YOYOW'
        >>> PandaXT('binance').altname('YOYOW/BTC')
        (Symbol:YOYOW/BTC)
        >>> PandaXT('binance').altname('YOYOW/BTC')
        (Symbol:YOYOW/BTC)

        :param name: currency or symbol name to check for alternative name.
        :type name: Text or Currency or Symbol
        :return: currency alternative name as Currency or Symbol instance.
        :rtype: Currency or Symbol
        """
        _symbol = str(name).upper()
        if '/' in _symbol.strip('/'):
            # print(_symbol)
            base, quote = _symbol.split('/')
            assert quote in self.base_markets, f'{quote} is not a valid base market.'
            base = self.get_currency(base)
            s = self.get_currency(base) + Currency(quote)
            s.price2precision = functools.partial(self.price2precision, s)
            s.cost2precision = functools.partial(self.cost2precision, s)
            s.amount2precision = functools.partial(self.amount2precision, s)
            return s
        else:
            return self.get_currency(_symbol)

    def cost2precision(self, symbol, cost, to_str=True) -> U[Float, Text]:
        """Return cost rounded to symbol precision exchange specifications.

        :param symbol: a valid exchange symbol.
        :type symbol: Text or Symbol
        :param Float cost: cost to be rounded.
        :param Bool to_str: True to return result as str, otherwise result will be returned as float
        :return float: cost rounded to specific symbol exchange specifications.
        """
        result = self._api.cost_to_precision(symbol, cost)
        return result if to_str else float(result)

    def amount2precision(self, symbol, amount, to_str=True) -> U[Float, Text]:
        """Return amount rounded to symbol precision exchange specifications.

        :param symbol: a valid exchange symbol.
        :type symbol: Text or Symbol
        :param Float amount: amount to be rounded.
        :param Bool to_str: True to return result as str, otherwise result will be returned as float
        :return: amount rounded to specific symbol exchange specifications.
        :rtype: float or Text
        """
        result = self._api.amount_to_precision(symbol, amount)
        return result if to_str else float(result)

    def price2precision(self, symbol, price, to_str=True) -> U[Float, Text]:
        """Return price rounded to symbol precision exchange specifications.

        :param symbol: a valid exchange symbol.
        :type symbol: Text or Symbol
        :param price: price to be rounded.
        :param Bool to_str: True to return result as str, otherwise result will be returned as float
        :return float: price rounded to specific symbol exchange specifications.
        """
        result = self._api.price_to_precision(symbol, price)
        return result if to_str else float(result)
        # m = self.markets.get(symbol)  # type: Market
        # t = str if to_str else float
        # template = '{:.@f}'.replace('@', str(m.precision.price))
        # return t(template.format(float(price)))

    # @check_symbol
    def get_orderbook(self, symbol, limit=5) -> pd.DataFrame:
        """Get order book data for a symbol.

        >>> book_entries = PandaXT('binance').get_orderbook('MDA/BTC', 5)
        >>> isinstance(book_entries['ask'].values[-1], float)
        True

        :param symbol: a valid exchange symbol.
        :type symbol: Text or Symbol
        :param int limit: limit entries returned to "limit" value.
        :return: DataFrame type with order book data for "symbol".
        :rtype: pd.DataFrame
        """

        raw = self._api.fetch_order_book(symbol, limit=int(limit))

        columns = ['ask', 'ask_vol', 'bid', 'bid_vol']
        data = [ask + bid for ask, bid in zip(raw['asks'], raw['bids'])]
        df = pd.DataFrame(data, columns=columns)
        return df.round({'ask': 8, 'bid': 8, 'ask_vol': 0, 'bid_vol': 0})

    def get_tickers(self, symbols, sorted_by=None) -> U[Ticker, Tickers]:
        """Get tickers dict with symbol name as keys for all symbols specified as positional args.

        >>> exchange = PandaXT('binance')
        >>> ticker = exchange.get_tickers('ADA/BTC', sorted_by='percentage')
        >>> isinstance(ticker, Ticker)
        True

        :param symbols: list of valid exchange symbols.
        :param sorted_by: a valid ticker field like percentage, last, ask, bid, quoteVolume, ...
        :return: Ticker or Tickers instance with returned data.
        :rtype: Ticker or Tickers
        """
        result = list()

        if isinstance(symbols, str):
            symbols = [symbols]
        if isinstance(symbols, (Symbols, list, tuple, set)) and len(symbols):
            symbols = list(symbols)

            for s in map(self.altname, symbols):
                if s not in self.markets:
                    log.debug(f'Symbol {s or "NULL"} is not listed in {self.name} exchange.')
                    continue
            try:
                raw = self._api.fetch_tickers(symbols)
                if len(raw) > 0:
                    if str(sorted_by) in list(raw.values())[0].keys():
                        raw = OrderedDict(sorted(raw.items(), key=lambda k: k[1][sorted_by], reverse=True))
                    result = Tickers(**raw)
                    result = result[symbols[0]] if len(symbols) == 1 else result
            except ccxt.ExchangeError as err:
                print(str(err))
        return result

    def get_indicators(self, indicators, symbol, timeframe='15m', limit=25, **kwargs) -> Dict[Text, pd.Series]:
        """Get technical indicators value for a symbol.

        :param Dict indicators: indicators and their params as dict (params ara mandatory, there is no default values).
        :param Text symbol: a valid exchange symbol.
        :param Text timeframe: an exchange valid timeframe (default 15m).
        :param Int limit: a valid exchange limit for returned rows (check exchange official API)
        supplied as a param / value dict instance also. Example: "{'roc': {'period': 9}}"
        :param kwargs: if "ohlc" is set with OHLC data (DataFrame) it will be use for value calculations.
        :return: dict type with indicators name/value pairs.
        """
        indicators = {k.lower(): v for k, v in indicators.items()}
        symbol = Symbol(symbol)
        return_value = OrderedDict.fromkeys(indicators.keys())
        supported_ti = [_a for _a in dir(tulipy.lib) if _a[0].islower()]

        functions = OrderedDict({i: getattr(tulipy, i)
                                 for i in indicators if i in supported_ti})

        data = kwargs.get('ohlc', self.get_ohlc(symbol, timeframe=timeframe, limit=limit))

        for n, fn in functions.items():
            inputs = ['close' if i in 'real' else i for i in fn.inputs]
            indicator_params = dict()
            if len(fn.options):
                options = [opt.replace(' ', '_') for opt in fn.options]
                indicator_params = indicators.get(n)
                indicator_params = {k: v
                                    for k, v in indicator_params.items()
                                    if k in options}

            try:
                raw = fn(*data[inputs].T.values, **indicator_params)
                di = data.index
                if n in 'roc':
                    raw = raw * 100.0
                sr = pd.Series(raw, name=n.upper())
                sr.index = di.values[-len(sr):]
                return_value[n] = sr.copy(True)

            except InvalidOptionError as err:
                print(str(err))

        return dict(ohlc=data, **{k: return_value[k.lower()] for k in indicators})

    def create_market_order(self, symbol, side, amount=None) -> Dict:
        """Create a market order order.

        :param Text symbol: a valid exchange symbol.
        :param Text side: accepted values: "buy", "sell"
        :param float amount: amount used in order creation.
        :return: order creation result data as dict.
        """
        return self.create_order(symbol=symbol, side=side, order_type='market', amount=amount)

    def buy(self, symbol, amount=None, price=None) -> Dict:
        """Create buy order.

        :param Text symbol: a valid exchange symbol.
        :param float amount: amount to buy or None to auto-fill
        :param float price: buy price or None to auto-fill
        :return Dict: order creation result data as dict.
        """
        return self.create_order(symbol=symbol, order_type='limit', side='buy', amount=amount, price=price)

    def create_order(self, symbol, side, order_type=None, amount=None, price=None) -> Dict:
        """Create a new order.

        :param Text symbol: symbol to be use for order creation.
        :param Text side: order side: 'sell' or 'buy'
        :param Text order_type: order type (default 'limit')
        :param float amount: amount used in order creation.
        :param float price: price used in order creation.
        :return: order result info as dict.
        """
        symbol = Symbol(symbol)
        response = dict()

        if symbol not in self.symbols:
            raise SymbolError(f'Invalid symbol: {symbol}')

        if side not in ['buy', 'sell']:
            raise SideError(side)

        currency = symbol.quote if side == 'buy' else symbol.base
        balance_field = 'free' if side == 'buy' else 'total'
        ticker_field = 'ask' if side == 'buy' else 'bid'

        amount = magic2num(
            amount or self.get_balances(balance_field).get(
                currency, 0.0))

        if amount > 0.0:
            price = magic2num(price or self.get_tickers(symbol).get(ticker_field))
            if side == 'buy':
                amount = amount / price
            try:
                response = self._api.create_order(symbol=symbol,
                                                  type=order_type or 'limit',
                                                  side=side,
                                                  amount=amount,
                                                  price=price)
            except ccxt.InvalidOrder as err:
                print(f' - [ERROR] {str(err)}', file=sys.stderr)
                response = dict()
        return response

    def sell(self, symbol, amount=None, price=None) -> Dict:
        """Create sell order.

        :param Text symbol: a valid exchange symbol.
        :param Float amount: amount to sell or None to auto-fill
        :param Float price: sell price or None to auto-fill
        :return dict: order creation result data as dict.
        """
        return self.create_order(symbol=symbol, order_type='limit', side='sell', amount=amount, price=price)

    def get_balances(self, field=None, tradeables_only=True, fiat=None) -> Wallet:
        """Get balances.

        >>> balances = PandaXT('binance').get_balances('total')
        >>> isinstance(balances, Wallet)
        True

        :param Text field: balance field to load: "total", "used" or "free" (None loads all of them).
        :param Bool tradeables_only: if True, drop balances below the minimum tradeable amount.
        :param Text fiat: if "EUR" or "USD", append an aggregated fiat estimation of the wallet value.
        :return: positive balances.
        """

        def is_zero(v) -> Bool:
            if isinstance(v, float):
                return v <= 0.0
            elif isinstance(v, dict):
                return v.get('total', 0.0) <= 0.0
            else:
                return False

        def is_tradeable(currency, _tickers, balance, base_market=None) -> Bool:
            """Check if a currency balance is over minimum tradeable amount for a base market.

            :param Text currency:
            :param Tickers _tickers:
            :param balance:
            :type balance: Dict or Float
            :param Text base_market:
            :return: True if currency is tradeable
            """
            if currency in self.base_markets:
                return True

            base_market = Currency(base_market or 'BTC')
            symbol: Symbol = self.altname(currency) + base_market

            market: Market = self.markets.get(symbol, False)
            if not market:
                return False
            # min amount in quote currency
            limits: Limit = market.limits
            quote_min_amount = limits.amount

            ticker: Ticker = _tickers[symbol]
            if currency == 'BTC' and 'USD' in base_market:
                last = 1.0 / ticker.last
            else:
                last = ticker.last
            # converting min_amount to base currency

            base_min_amount = last * quote_min_amount
            # subtracting a 0.01% fee
            base_min_amount = base_min_amount * 0.999
            if isinstance(balance or [], dict):
                balance = balance.get('total', 0.0)
            else:
                balance = balance or 0.0
            return balance > base_min_amount

        try:
            data = self._api.fetch_balance()

            if 'info' in data:
                del data['info']

            data: dict = data.pop(field) if field else data

            data = {str(k): v
                    for k, v in data.items()
                    if not is_zero(v)}

            if tradeables_only:
                symbols = {Currency(c) + Currency('BTC') for c in data}
                tickers = self.get_tickers(symbols)
                data = {str(k): v
                        for k, v in data.items()
                        if k in self._basemarkets or is_tradeable(k, tickers, v)}
            wallet = Wallet(**data)
            if str(fiat or '').upper() in ['EUR', 'USD']:
                acum = 0
                for k, v in Wallet(**data).items():
                    if fiat.lower() == 'eur':
                        acum += v.to_eur
                    elif fiat.lower() == 'usd':
                        acum += v.to_usd
                wallet.update({fiat.upper(): acum})
            return wallet
        except ccxt.NetworkError as err:
            print('PandaXT::get_balances', str(err))

    def get_balance(self, currency, field=None) -> Balance:
        """Get balance for a currency.

        >>> PandaXT('binance').get_balance('STORJ', 'total')

        :param Text currency: a valid exchange currency.
        :param Text field: accepted values: total, used, free
        :return: currency with balance amount as float.
        """
        currency = self.altname(str(currency))

        if currency not in self.currencies:
            raise CurrencyError(currency)

        if field and field in ['total', 'free', 'used']:
            field = field
        else:
            field = None

        balance_data = self.get_balances(field=field) or Balance(
            **{'currency': currency, field: 0.0})
        if currency not in balance_data:
            raise ccxt.InsufficientFunds()
        else:
            return balance_data[currency]

    def get_user_trades(self, symbol, side=None, limit=25) -> pd.DataFrame:
        """Get user trades filter by symbol.

        :param Text symbol: a valid exchange symbol
        :param Text side: "buy" or "sell"
        :param int limit: a valid limit for rows return (please, refer to official exchange API manual for details)
        :return pd.DataFrame: user trades as pandas DataFrame type.
        """
        symbol = str(symbol).upper()
        if symbol not in self.symbols and not self.altname(symbol):
            raise SymbolError(symbol)
        symbol = Symbol(symbol) if symbol in self.symbols else self.altname(symbol)

        trades = self._api.fetch_my_trades(symbol, limit=limit)
        if trades:
            trades = [{k: v for k, v in t.items() if k != 'info'}
                      for t in trades]
            for idx, t in enumerate(trades.copy()):
                trades[idx].update(total_cost=trades[idx]['fee']['cost'])
                del trades[idx]['fee']
            trades = pd.DataFrame(trades)
            trades['real_cost'] = trades['cost'] + \
                                  (trades['cost'] * trades['price'])
            # TODO: not totally true so revise it
            trades['real_price'] = trades['price'] * 1.001
            trades['real_amount'] = trades['real_cost'] * trades['price']
            if str(side).lower() in ['buy', 'sell']:
                trades = trades.query(f'side == "{str(side).lower()}"')

            return trades.sort_index(ascending=False)

    def get_trades(self, symbol, side=None, limit=25) -> pd.DataFrame:
        """Get latest user trades for a symbol.

        :param Text symbol: a valid exchange symbol
        :param Text side: "buy" or "sell"
        :param int limit: a valid limit for rows return (please, refer to official exchange API manual for details)
        :return: latest trades for a symbol.
        """
        symbol = self.altname(symbol)
        trades = self._api.fetch_trades(symbol, limit=limit)
        if trades:
            trades = [{k: v for k, v in t.items() if k != 'info' and v}
                      for t in trades]
        trades = pd.DataFrame(trades).set_index('timestamp')
        if str(side).lower() in ['buy', 'sell']:
            trades = trades.query(f'side == "{str(side).lower()}"')
        return trades.sort_index(ascending=False)

    def get_cost(self, symbol, **kwargs) -> Float:
        """FIXME do not work well sometimes giving a bad result by unknown reason.

        Get weighted average (from buy trades data) cost for a symbol.

        >>> api = PandaXT('binance')
        >>> symbol_cost = api.get_cost('AGI/BTC')
        >>> isinstance(symbol_cost, float)
        True

        :param U[Symbol,Text] symbol: a valid exchange symbol.
        :param kwargs: accepted keys are: balance (Balance) and trades (pd.DataFrame)
        :return: cost calculation result as float type.
        """
        symbol = self.altname(symbol)
        if isinstance(symbol, Currency):
            symbol = symbol + Currency('BTC')

        base, quote = symbol.parts

        # reuse provided balance and trades data (for rate limit save)
        if 'balance' in kwargs:
            cached = kwargs.get('balance')  # type: Balance
            balance = cached[base] if isinstance(cached, Wallet) else cached
        else:
            balance = self.get_balance(base, field='total')  # type: Balance

        total_balance = balance.total if balance and hasattr(
            balance, 'total') else balance

        if total_balance > 0.0:
            trades = kwargs.get('trades')
            if trades is None or len(trades) == 0:
                trades = self.get_user_trades(symbol, side='buy')
            elif not isinstance(trades, pd.DataFrame):
                trades = pd.DataFrame(trades)
            if trades['order'].isna().all():
                trades['order'].update(trades['id'])

            # group-by operations per column
            columns_op = {'amount': 'mean',
                          'price': 'mean',
                          'cost': 'mean',
                          'timestamp': 'mean'}

            trades = trades.groupby('order').agg(columns_op).sort_index(
                ascending=False)  # type: pd.DataFrame
            trades = trades[['price', 'amount']].reset_index(drop=True)
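            # Walk buy orders newest-first until their cumulative amount covers the
            # current balance, then return the amount-weighted average price of that
            # slice (the last row of the slice is clipped to the remaining balance).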

            for index, price, amount in trades.itertuples():
                if round(total_balance - amount, 8) <= 0.0:
                    if round(total_balance - amount, 8) != 0.0:
                        prices = trades[:index + 1]['price'].values
                        amounts = trades[:index + 1]['amount'].values
                        amounts[-1] = total_balance
                    else:
                        prices, amounts = trades[:index + 1].T.values
                    # assumes `import numpy as np`; pd.np was removed in pandas 1.0
                    return round(np.average(prices, weights=amounts), 10)
                else:
                    total_balance -= amount

    def get_order_status(self, order_id, symbol=None) -> Text:
        """Get order status by order_id.

        :param Text order_id: a valid order id.
        :param Text symbol: a valid exchange market
        :return: order status as str. Possible values are: "closed",  "canceled", "open"
        """
        return self._api.fetch_order_status(order_id, symbol=symbol)

    def get_open_orders(self, symbol=None, order_id=None) -> List[Dict[Symbol, Dict]]:
        """Get open orders.md for a symbol.

        :param Text symbol: symbol used in opened orders.md server query.
        :param order_id: just return data for a specific order.
        :type order_id: Text or int
        :return List: list of open orders.md for specific symbol.
        """

        raw = self._api.fetch_open_orders(symbol or order_id)
        if isinstance(raw, list) and any(raw):
            return [{Symbol(k): v for k, v in r.items() if k != 'info'} for r in raw]
        else:
            return list()

    def get_profit(self, currency) -> U[Tuple, Float]:
        """Returns current profit for a currency and its weighted average buy cost.

        :param Text currency: a valid currency to use at profit and cost calc
        :return: current profit and weighted average buy cost as a tuple
        """
        currency = Currency(str(currency))
        btc_symbol = currency + Currency('BTC')  # type: Symbol
        balance = self.get_balance(currency)

        if balance.used > 0.0:
            cost = self.get_cost(symbol=btc_symbol)
            return balance.to_btc - (cost * balance.total), cost
        else:
            return 0.0, 0.0

    def get_withdrawals(self):
        """Get withdrawals info.

        :return:
        """
        if self._api.has['fetchWithdrawals']:
            result = self._api.fetch_withdrawals()
            return [{k: v for k, v in r.items() if k != 'info'} for r in result]
        else:
            raise ccxt.NotSupported('Withdrawals not supported by this exchange.')

    def get_transactions(self):
        """Get transactions info.

        :return:
        """
        if self._api.has['fetchTransactions']:
            result = self._api.fetch_transactions()
            return [{k: v for k, v in r.items() if k != 'info'} for r in result]
        else:
            raise ccxt.NotSupported('Transactions not supported by this exchange.')

    def get_deposits(self):
        """Get deposits info.

        :return:
        """
        if self._api.has['fetchDeposits']:
            result = self._api.fetch_deposits()
            return [{k: v for k, v in r.items() if k != 'info'} for r in result]
        else:
            raise ccxt.NotSupported('Deposits not supported by this exchange.')

    def cancel_order(self, symbol, last_only=False) -> List:
        """Cancel symbols open orders.md for a symbol.

        :param symbol: the symbol with open orders.md.
        :type symbol: Bool or Symbol
        :param Bool last_only: if True, only last order sent will be cancelled.
        :return: list of dict with data about cancellations.
        """
        symbol = Symbol(symbol)
        pending_orders = self.get_open_orders(symbol)

        if pending_orders:
            if last_only:
                return self._api.cancel_order(pending_orders[-1]['id'], symbol)
            else:
                canceled_orders = list()
                for p in pending_orders:
                    return_value = self._api.cancel_order(p['id'], symbol)

                    if return_value and return_value.get(
                            'status') in ('cancel', 'canceled'):
                        canceled_orders.append(
                            {k: v for k, v in return_value.items() if v})
                    else:
                        # retry once if the first attempt did not report a cancellation
                        self._api.cancel_order(p['id'], symbol)
                return canceled_orders

    def __str__(self) -> Text:
        """PandaXT instance as "str" type representation.

        >>> str(PandaXT('binance'))
        'binance'

        :return Text: PandaXT instance as "str" type representation.
        """
        return self.id

    def __repr__(self):
        """PandaXT instance as "str" type representation.

        >>> PandaXT('binance')
        binance

        :return str: PandaXT instance as "str" type representation.
        """
        return self.id

    def __contains__(self, item) -> Bool:
        """Check if a symbol or currency is supported by exchange.

        >>> exchange = PandaXT('binance')
        >>> Currency('ETH') in exchange
        True
        >>> Currency('ETH/BTC') in exchange
        True
        >>> Currency('MyCOIN') in exchange
        False

        :param item: currency or symbol for supports checking on exchange.
        :type item: Text or Currency or Symbol
        :return bool: True if item is supported, otherwise False.
        """
        return str(item) in self.markets or str(item) in map(str, self.currencies)
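
A minimal usage sketch for the PandaXT wrapper above (the exchange id, symbol and printed values are illustrative; the calls go through ccxt to live exchange endpoints):

api = PandaXT('binance')
print('ETH' in api)  # True if binance lists ETH
# total balances, keeping only entries big enough to trade against a base market
wallet = api.get_balances(field='total', tradeables_only=True)
print(wallet)
# weighted average buy cost for a symbol (see get_cost above)
print(api.get_cost('ETH/BTC'))
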
class FileDirCache(MutableMapping):
    def __init__(
        self,
        use_listings_cache=True,
        listings_expiry_time=None,
        listings_cache_location=None,
        **kwargs,
    ):
        """

        Parameters
        ----------
        use_listings_cache: bool
            If False, this cache never returns items, but always reports KeyError,
            and setting items has no effect
        listings_expiry_time: int or float (optional)
            Time in seconds that a listing is considered valid. If None,
            listings do not expire.
        listings_cache_location: str (optional)
            Directory path at which the listings cache file is stored. If None,
            an autogenerated path at the user folder is created.

        """
        import appdirs
        from diskcache import Cache

        listings_expiry_time = listings_expiry_time and float(listings_expiry_time)

        if listings_cache_location:
            listings_cache_location = Path(listings_cache_location) / str(listings_expiry_time)
        else:
            listings_cache_location = Path(appdirs.user_cache_dir(appname="fsspec_dircache")) / str(
                listings_expiry_time
            )

        try:
            listings_cache_location.mkdir(exist_ok=True, parents=True)
        except Exception:
            logger.error(f"folder for dircache could not be created at {listings_cache_location}")

        self.cache_location = listings_cache_location

        self._cache = Cache(directory=listings_cache_location)
        self.use_listings_cache = use_listings_cache
        self.listings_expiry_time = listings_expiry_time

    def __getitem__(self, item):
        """Draw item as fileobject from cache; raise KeyError if absent or expired."""
        value = self._cache.get(key=item, read=True, retry=True)
        if not self.use_listings_cache or value is None:
            raise KeyError(item)
        return value

    def clear(self):
        self._cache.clear()

    def __len__(self):
        return len(list(self._cache.iterkeys()))

    def __contains__(self, item):
        # None means missing or expired; an empty listing still counts as present
        return self._cache.get(item, retry=True) is not None

    def __setitem__(self, key, value):
        if not self.use_listings_cache:
            return
        self._cache.set(key=key, value=value, expire=self.listings_expiry_time, retry=True)

    def __delitem__(self, key):
        del self._cache[key]

    def __iter__(self):
        return (k for k in self._cache.iterkeys() if k in self)

    def __reduce__(self):
        return (
            FileDirCache,
            (self.use_listings_cache, self.listings_expiry_time, self.cache_location),
        )
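
A short sketch of FileDirCache used as a dict-like, disk-backed listings cache (the path and the 60-second expiry are illustrative):

cache = FileDirCache(use_listings_cache=True, listings_expiry_time=60)
cache['/bucket/dir'] = [{'name': '/bucket/dir/a.csv', 'size': 123}]
assert '/bucket/dir' in cache   # becomes False once the 60s expiry passes
listing = cache['/bucket/dir']
cache.clear()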
Пример #22
0
class RouteHandler(object):
    def __init__(self, loop, messenger, database):
        self._loop = loop
        self._messenger = messenger
        self._database = database
        self.cache = Cache()

    async def create_election(self, request):
        private_key, public_key, user = await self._authorize(request)
        body = await decode_request(request)
        required_fields = [
            'name', 'description', 'start_timestamp', 'end_timestamp',
            'results_permission', 'can_change_vote', 'can_show_realtime',
            'voting_options', 'poll_book'
        ]
        validate_fields(required_fields, body)

        if user.get('type') != 'ADMIN' and user.get('type') != 'SUPERADMIN':
            raise ApiForbidden('Voter must be an admin or superadmin')

        election_id = uuid.uuid1().hex
        voting_options = body.get('voting_options')
        admin = await self._database.fetch_voter_resource(
            public_key=public_key)

        for voting_option in voting_options:
            if voting_option.get('name').upper() in ("NULL", "BLANK"):
                raise ApiInternalError('NULL and BLANK are default options')

        voting_options.append({"name": "NULL", "description": "VOTE NULL"})
        voting_options.append({"name": "BLANK", "description": "VOTE BLANK"})

        await self._messenger.send_create_election_transaction(
            private_key=private_key,
            election_id=election_id,
            name=body.get('name'),
            description=body.get('description'),
            start_timestamp=body.get('start_timestamp'),
            end_timestamp=body.get('end_timestamp'),
            results_permission=body.get('results_permission'),
            can_change_vote=body.get('can_change_vote'),
            can_show_realtime=body.get('can_show_realtime'),
            admin_id=admin.get('voter_id'),
            status=1,
            timestamp=get_time())

        for voting_option in voting_options:
            voting_option_id = uuid.uuid1().hex

            await self._messenger.send_create_voting_option_transaction(
                private_key=private_key,
                voting_option_id=voting_option_id,
                name=voting_option.get('name'),
                description=voting_option.get('description'),
                election_id=election_id,
                status=1,
                timestamp=get_time())

            await self._database.insert_voting_option_num_vote_resource(
                voting_option_id=voting_option_id,
                name=voting_option.get('name'),
                election_id=election_id)

        for poll_book in body.get('poll_book'):
            await self._messenger.send_create_poll_registration_transaction(
                private_key=private_key,
                voter_id=poll_book.get('id'),
                name=poll_book.get('name'),
                election_id=election_id,
                status=1,
                timestamp=get_time())

        return json_response({'data': 'Create election transaction submitted'})

    async def create_voter(self, request):
        body = await decode_request(request)
        required_fields = ['voter_id', 'name', 'password']
        validate_fields(required_fields, body)

        if await self._database.is_voter_created(
                body.get("voter_id")) is not None:
            raise ApiConflict("Voter ID must be unique")

        public_key, private_key = self._messenger.get_new_key_pair()

        await self._messenger.send_create_voter_transaction(
            private_key=private_key,
            voter_id=body.get('voter_id'),
            public_key=public_key,
            name=body.get('name'),
            created_at=get_time(),
            type='VOTER')

        encrypted_private_key = encrypt_private_key(request.app['aes_key'],
                                                    public_key, private_key)
        hashed_password = hash_password(body.get('password'))

        await self._database.create_auth_entry(public_key,
                                               encrypted_private_key,
                                               hashed_password)

        user = {
            'name': body.get('name'),
            'voter_id': body.get('voter_id'),
            'type': 'VOTER'
        }

        token = self.generate_auth_token(request.app['secret_key'], public_key,
                                         user)

        return json_response({'accessToken': token, 'user': user})

    async def update_voter_type(self, request):
        private_key, public_key, user = await self._authorize(request)
        body = await decode_request(request)
        required_fields = ['type']
        validate_fields(required_fields, body)

        voter_id = request.match_info.get('voterId', '')
        if voter_id == '':
            raise ApiBadRequest(
                'The voter ID is a required query string parameter')

        if user.get('type') != 'SUPERADMIN':
            raise ApiForbidden('Forbidden')

        voter = await self._database.fetch_voter_resource(voter_id=voter_id)

        if voter is None:
            raise ApiNotFound('No voter found')

        if body.get('type') == 'ADMIN' and (voter.get('type') == 'ADMIN' or
                                            voter.get('type') == 'SUPERADMIN'):
            raise ApiConflict(
                'Voter {} is already an admin or superadmin'.format(voter_id))
        elif body.get('type') == 'VOTER' and voter.get('type') == 'VOTER':
            raise ApiConflict('Voter {} is already a voter.'.format(voter_id))

        auth_info = await self._database.fetch_auth_resource(
            public_key=voter.get('public_key'))
        voter_private_key = decrypt_private_key(
            request.app['aes_key'], voter.get('public_key'),
            auth_info.get('encrypted_private_key'))

        await self._messenger.send_update_voter_transaction(
            private_key=voter_private_key,
            voter_id=voter_id,
            public_key=voter.get('public_key'),
            name=voter.get('name'),
            created_at=get_time(),
            type=body.get('type'))

        return json_response({
            'voter': {
                'voter_id': voter_id,
                'name': voter.get('name'),
                'type': body.get('type')
            }
        })

    async def create_vote(self, request):
        body = await decode_request(request)
        required_fields = []
        validate_fields(required_fields, body)

        private_key, public_key, user = await self._authorize(request)
        voting_option_id = request.match_info.get('votingOptionId', '')

        if voting_option_id == '':
            raise ApiBadRequest(
                'The voting option ID is a required query string parameter')

        voter = await self._database.fetch_voter_resource(
            public_key=public_key)

        if voter is None:
            raise ApiNotFound('Voter with the public_key '
                              '{} was not found'.format(public_key))

        voting_option = await self._database.fetch_voting_option_resource(
            voting_option_id=voting_option_id)
        vo_count_vote = await self._database.fetch_voting_option_num_vote_resource(
            voting_option_id=voting_option_id)

        if voting_option is None:
            raise ApiNotFound('Voting Option with the voting option id '
                              '{} was not found'.format(voting_option_id))

        election_id = voting_option.get('election_id')
        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election.get('status') == 0:
            raise ApiBadRequest('Election with the election id '
                                '{} is cancelled'.format(election_id))

        current_time = get_time()

        if election.get('end_timestamp') < current_time or election.get(
                'start_timestamp') > current_time:
            raise ApiBadRequest('Not in election time.')

        poll_registration = await self._database.fetch_poll_book_registration(
            voter_id=user.get('voter_id'), election_id=election_id)

        if poll_registration is None:
            raise ApiBadRequest(
                'Voter is not registered in the poll book of the election with the id '
                '{}.'.format(election_id))

        num_votes_update = vo_count_vote.get('num_votes') + 1

        await self._messenger.send_create_vote_transaction(
            private_key=private_key,
            vote_id=uuid.uuid1().hex,
            timestamp=get_time(),
            voter_id=voter.get('voter_id'),
            election_id=voting_option.get('election_id'),
            voting_option_id=voting_option_id)

        await self._database.update_voting_option_num_vote_resource(
            voting_option_id=voting_option_id, num_votes=num_votes_update)

        return json_response({'data': 'Create vote transaction submitted'})

    async def update_vote(self, request):
        private_key, public_key, user = await self._authorize(request)
        body = await decode_request(request)
        required_fields = ['voting_option_id']
        validate_fields(required_fields, body)

        vote_id = request.match_info.get('voteId', '')

        if vote_id == '':
            raise ApiBadRequest(
                'The vote ID is a required query string parameter')

        vote = await self._database.fetch_vote_resource(vote_id=vote_id)

        if vote is None:
            raise ApiNotFound('Vote with the vote id '
                              '{} was not found'.format(vote_id))

        election_id = vote.get('election_id')

        if vote.get('voting_option_id') == body.get('voting_option_id'):
            raise ApiBadRequest('Vote must be different.')

        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        if election.get('can_change_vote') == 0:
            raise ApiInternalError(
                'Election with the election id '
                '{} does not permit changing the vote'.format(election_id))

        current_time = get_time()

        if election.get('end_timestamp') < current_time or election.get(
                'start_timestamp') > current_time:
            raise ApiBadRequest('Not in election time.')

        if election.get('admin_id') == user.get('voter_id'):
            raise ApiBadRequest(
                'The owner of the election with the id '
                '{} cannot change votes.'.format(election_id))

        new_voting_option_id = body.get('voting_option_id')
        old_voting_option_id = vote.get('voting_option_id')

        old_num_vote = await self._database.fetch_voting_option_num_vote_resource(
            voting_option_id=old_voting_option_id)
        new_num_vote = await self._database.fetch_voting_option_num_vote_resource(
            voting_option_id=new_voting_option_id)
        num_votes_remove = old_num_vote.get('num_votes') - 1
        num_votes_update = new_num_vote.get('num_votes') + 1

        await self._messenger.send_update_vote_transaction(
            private_key=private_key,
            vote_id=vote_id,
            timestamp=get_time(),
            voting_option_id=new_voting_option_id)

        # remove -1 to old voting option
        await self._database.update_voting_option_num_vote_resource(
            voting_option_id=old_voting_option_id, num_votes=num_votes_remove)

        # add +1 to new voting option
        await self._database.update_voting_option_num_vote_resource(
            voting_option_id=new_voting_option_id, num_votes=num_votes_update)

        return json_response({'data': 'Update Vote transaction submitted'})

    async def update_election(self, request):
        private_key, public_key, user = await self._authorize(request)
        body = await decode_request(request)
        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        current_time = get_time()

        if election.get('start_timestamp') < current_time:
            raise ApiBadRequest('Election with the election id '
                                '{} has already started.'.format(election_id))

        if election.get('admin_id') != user.get('voter_id'):
            raise ApiBadRequest(
                'User is not the owner of the election with the id '
                '{}.'.format(election_id))

        def updated(key):
            # prefer the request body value, fall back to the stored election value
            value = body.get(key)
            return value if value is not None else election.get(key)

        await self._messenger.send_update_election_transaction(
            private_key=private_key,
            election_id=election_id,
            name=updated('name'),
            description=updated('description'),
            start_timestamp=updated('start_timestamp'),
            end_timestamp=updated('end_timestamp'),
            results_permission=updated('results_permission'),
            can_change_vote=updated('can_change_vote'),
            can_show_realtime=updated('can_show_realtime'),
            admin_id=user.get('voter_id'),
            status=updated('status'),
            timestamp=get_time())

        if body.get('voting_options') is not None:
            for voting_option in body.get('voting_options'):
                voting_option_id = uuid.uuid1().hex

                await self._messenger.send_create_voting_option_transaction(
                    private_key=private_key,
                    voting_option_id=voting_option_id,
                    name=voting_option.get('name'),
                    description=voting_option.get('description'),
                    election_id=election_id,
                    status=1,
                    timestamp=get_time())

                await self._database.insert_voting_option_num_vote_resource(
                    voting_option_id=voting_option_id,
                    name=voting_option.get('name'),
                    election_id=election_id)
        if body.get('poll_book') is not None:
            for poll_book in body.get('poll_book'):
                await self._messenger.send_create_poll_registration_transaction(
                    private_key=private_key,
                    voter_id=poll_book.get('id'),
                    name=poll_book.get('name'),
                    election_id=election_id,
                    status=1,
                    timestamp=get_time())

        return json_response({'data': 'Update Election transaction submitted'})

    async def get_election(self, request):
        private_key, public_key, user = await self._authorize(request)

        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        if 'asAdmin' in request.rel_url.query:
            election = await self._database.fetch_election_with_can_vote_resource_admin(
                voter_id=user.get('voter_id'), election_id=election_id)
        else:
            election = await self._database.fetch_election_with_can_vote_resource(
                voter_id=user.get('voter_id'), election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        if election.get('results_permission') != 'PUBLIC':
            if election.get('can_vote') is False and election.get(
                    'admin_id') != user.get('voter_id'):
                raise ApiForbidden(
                    'Voter is not registered in the poll book of the election with the id '
                    '{}.'.format(election_id))

        return json_response(election)

    async def get_election_votes(self, request):
        private_key, public_key, user = await self._authorize(request)
        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        number_of_votes = await self._database.fetch_number_of_votes(
            election_id=election_id)

        if number_of_votes is None:
            raise ApiNotFound('No voting options were found for the election id '
                              '{}'.format(election_id))

        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        if election.get('results_permission') != 'PUBLIC':

            poll_registration = await self._database.fetch_poll_book_registration(
                voter_id=user.get('voter_id'), election_id=election_id)

            if poll_registration is None and election.get(
                    'admin_id') != user.get('voter_id'):
                raise ApiBadRequest(
                    'Voter is not registered in the poll book of the election with the id '
                    '{}.'.format(election_id))

        return json_response(number_of_votes)

    async def get_poll_registrations(self, request):
        private_key, public_key, user = await self._authorize(request)
        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        poll_book = await self._database.fetch_poll_book(
            election_id=election_id)

        if poll_book is None:
            raise ApiNotFound('No voters were found for the election id '
                              '{}'.format(election_id))

        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        if election.get('admin_id') != user.get('voter_id'):
            raise ApiBadRequest(
                'User is not the owner of the election with the id '
                '{}.'.format(election_id))

        return json_response(poll_book)

    async def count_poll_registrations(self, request):
        private_key, public_key, user = await self._authorize(request)
        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        count_poll_book = await self._database.count_poll_book(
            election_id=election_id)

        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        if election.get('results_permission') != 'PUBLIC':

            poll_registration = await self._database.fetch_poll_book_registration(
                voter_id=user.get('voter_id'), election_id=election_id)

            if poll_registration is None and election.get(
                    'admin_id') != user.get('voter_id'):
                raise ApiBadRequest(
                    'Voter is not registered in the poll book of the election with the id '
                    '{}.'.format(election_id))

        return json_response(count_poll_book)

    async def list_voting_options_election(self, request):
        private_key, public_key, user = await self._authorize(request)
        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        voting_options = await self._database.fetch_election_voting_options_resource(
            election_id=election_id)

        if voting_options is None:
            raise ApiNotFound('Voting Options for the election id '
                              '{} were not found'.format(election_id))

        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        if election.get('results_permission') != 'PUBLIC':

            poll_registration = await self._database.fetch_poll_book_registration(
                voter_id=user.get('voter_id'), election_id=election_id)

            if poll_registration is None and election.get(
                    'admin_id') != user.get('voter_id'):
                raise ApiForbidden(
                    'Voter is not registered in the poll book of the election with the id '
                    '{}.'.format(election_id))

        return json_response(voting_options)

    async def get_voting_option(self, request):
        private_key, public_key, user = await self._authorize(request)
        voting_option_id = request.match_info.get('votingOptionId', '')

        if voting_option_id == '':
            raise ApiBadRequest(
                'The voting option ID is a required query string parameter')

        voting_option = await self._database.fetch_voting_option_resource(
            voting_option_id=voting_option_id)

        if voting_option is None:
            raise ApiNotFound('Voting option with the id '
                              '{} was not found'.format(voting_option_id))

        return json_response(voting_option)

    async def update_voting_option_status(self, request):
        private_key, public_key, user = await self._authorize(request)
        voting_option_id = request.match_info.get('votingOptionId', '')

        if voting_option_id == '':
            raise ApiBadRequest(
                'The voting option ID is a required query string parameter')

        voting_option = await self._database.fetch_voting_option_resource(
            voting_option_id=voting_option_id)

        if voting_option is None:
            raise ApiNotFound('Voting Option with the voting option id '
                              '{} was not found'.format(voting_option_id))

        election_id = voting_option.get('election_id')
        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        current_time = get_time()

        if election.get('start_timestamp') < current_time:
            raise ApiBadRequest('Election with the election id '
                                '{} has already started.'.format(election_id))

        if election.get('admin_id') != user.get('voter_id'):
            raise ApiBadRequest(
                'User is not the owner of the election with the id '
                '{}.'.format(election_id))

        # status may be stored as int (1/0), so test truthiness instead of `is True`
        if voting_option.get('status'):
            status = 0
        else:
            status = 1

        await self._messenger.send_update_voting_option_status_transaction(
            private_key=private_key,
            voting_option_id=voting_option_id,
            name=voting_option.get('name'),
            description=voting_option.get('description'),
            election_id=voting_option.get('election_id'),
            status=status,
            timestamp=get_time())

        return json_response(
            {'data': 'Update Voting Option Status transaction submitted'})

    async def update_poll_book_status(self, request):
        private_key, public_key, user = await self._authorize(request)
        voter_id = request.match_info.get('voterId', '')

        if voter_id == '':
            raise ApiBadRequest(
                'The voter ID is a required query string parameter')

        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        voter_poll_book = await self._database.fetch_poll_book_registration(
            election_id=election_id, voter_id=voter_id)

        if voter_poll_book is None:
            raise ApiNotFound('Voter with the voter id '
                              '{} was not found'.format(voter_id))

        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        current_time = get_time()

        if election.get('start_timestamp') < current_time:
            raise ApiBadRequest('Election with the election id '
                                '{} has already started.'.format(election_id))

        if election.get('admin_id') != user.get('voter_id'):
            raise ApiBadRequest(
                'User is not the owner of the election with the id '
                '{}.'.format(election_id))

        if voter_poll_book.get('status'):
            status = 0
        else:
            status = 1

        await self._messenger.send_update_voter_poll_book_status_transaction(
            private_key=private_key,
            voter_id=voter_id,
            name=voter_poll_book.get('name'),
            election_id=election_id,
            status=status,
            timestamp=get_time())

        return json_response(
            {'data': 'Update Poll Registration Status transaction submitted'})

    async def list_elections_current(self, request):
        private_key, public_key, user = await self._authorize(request)
        voterId = request.match_info.get('voterId', '')

        if voterId == '':
            raise ApiBadRequest(
                'The voter ID is a required query string parameter')

        if user.get('voter_id') != voterId:
            raise ApiForbidden('Voter must be the authenticated one')

        current_elections_list = await self._database.fetch_current_elections_resources(
            voterId, get_time())
        return json_response(current_elections_list)

    async def list_elections_past(self, request):
        private_key, public_key, user = await self._authorize(request)
        voterId = request.match_info.get('voterId', '')

        if voterId == '':
            raise ApiBadRequest(
                'The voter ID is a required query string parameter')

        if user.get('voter_id') != voterId:
            raise ApiForbidden('Voter must be the authenticated one')

        past_elections_list = await self._database.fetch_past_elections_resources(
            voterId, get_time())

        return json_response(past_elections_list)

    async def list_admin_elections(self, request):
        private_key, public_key, user = await self._authorize(request)

        voter_id = request.match_info.get('voterId', '')

        if voter_id == '':
            raise ApiBadRequest(
                'The voter ID is a required query string parameter')

        if user.get('voter_id') != voter_id:
            raise ApiForbidden('Admin must be the authenticated one')

        if user.get('type') != 'ADMIN' and user.get('type') != 'SUPERADMIN':
            raise ApiForbidden('Voter must be an admin or superadmin')

        admin_elections_list = await self._database.fetch_admin_elections_resources(
            user.get('voter_id'))
        return json_response(admin_elections_list)

    async def list_public_elections(self, request):
        private_key, public_key, user = await self._authorize(request)

        public_elections_list = await self._database.fetch_public_elections_resources(
            get_time())
        return json_response(public_elections_list)

    async def list_public_past_elections(self, request):
        private_key, public_key, user = await self._authorize(request)

        past_elections_list = await self._database.fetch_public_past_elections_resources(
            user.get('voter_id'), get_time())

        return json_response(past_elections_list)

    async def list_admins(self, request):
        private_key, public_key, user = await self._authorize(request)

        if user.get('type') != 'SUPERADMIN':
            raise ApiForbidden('Forbidden')

        admin_list = await self._database.fetch_admins_resources()

        return json_response(admin_list)

    async def get_voters(self, request):
        private_key, public_key, user = await self._authorize(request)

        if user.get('type') != 'SUPERADMIN':
            raise ApiForbidden('Forbidden')

        voter_id = request.match_info.get('voterId', '')
        voters_list = await self._database.fetch_voters_resources(
            voter_id=voter_id)

        return json_response(voters_list)

    async def get_vote(self, request):
        private_key, public_key, user = await self._authorize(request)
        vote_id = request.match_info.get('voteId', '')

        if vote_id == '':
            raise ApiBadRequest(
                'The vote ID is a required query string parameter')

        vote = await self._database.fetch_vote_resource(vote_id=vote_id)

        if vote is None:
            raise ApiNotFound('Vote with the vote id '
                              '{} was not found'.format(vote_id))

        election_id = vote.get('election_id')
        election = await self._database.fetch_election_resource(
            election_id=election_id)

        if election is None:
            raise ApiNotFound('Election with the election id '
                              '{} was not found'.format(election_id))

        return json_response(vote)

    async def get_vote_election(self, request):
        private_key, public_key, user = await self._authorize(request)
        voter_id = request.match_info.get('voterId', '')

        if voter_id == '':
            raise ApiBadRequest(
                'The voter ID is a required query string parameter')

        election_id = request.match_info.get('electionId', '')

        if election_id == '':
            raise ApiBadRequest(
                'The election ID is a required query string parameter')

        if user.get('voter_id') != voter_id:
            raise ApiForbidden('Voter must be the authenticated one')

        vote = await self._database.fetch_my_vote__election_resource(
            voter_id=voter_id, election_id=election_id)

        return json_response(vote)

    async def authenticate(self, request):
        body = await decode_request(request)
        required_fields = ['voter_id', 'password']
        validate_fields(required_fields, body)

        password = bytes(body.get('password'), 'utf-8')

        voter = await self._database.fetch_voter_resource(
            voter_id=body.get('voter_id'))
        if voter is None:
            raise ApiUnauthorized('Incorrect voter_id or password')

        auth_info = await self._database.fetch_auth_resource(
            public_key=voter.get('public_key'))
        if auth_info is None:
            raise ApiUnauthorized('No voter with that public key exists')

        hashed_password = auth_info.get('hashed_password')
        if not bcrypt.checkpw(password, bytes.fromhex(hashed_password)):
            raise ApiUnauthorized('Incorrect public key or password')

        user = {
            'name': voter.get('name'),
            'voter_id': body.get('voter_id'),
            'type': voter.get('type')
        }

        token = self.generate_auth_token(request.app['secret_key'],
                                         voter.get('public_key'), user)

        return json_response({'accessToken': token, 'user': user})

    async def _authorize(self, request):
        token = request.headers.get('AUTHORIZATION')
        if token is None:
            raise ApiUnauthorized('No auth token provided')
        token_prefixes = ('Bearer', 'Token')
        for prefix in token_prefixes:
            if prefix in token:
                token = token.partition(prefix)[2].strip()
        try:
            token_dict = self.deserialize_auth_token(request.app['secret_key'],
                                                     token)
        except BadSignature:
            raise ApiUnauthorized('Invalid auth token')
        public_key = token_dict.get('public_key')

        auth_resource = await self._database.fetch_auth_resource(
            public_key=public_key)
        if auth_resource is None:
            raise ApiUnauthorized('Token is not associated with an agent')

        user = self.cache.get(token)
        return decrypt_private_key(
            request.app['aes_key'], public_key,
            auth_resource['encrypted_private_key']), public_key, user

    async def logout(self, request):
        await self._authorize(request)
        token = request.headers.get('AUTHORIZATION')
        token_prefixes = ('Bearer', 'Token')
        for prefix in token_prefixes:
            if prefix in token:
                token = token.partition(prefix)[2].strip()

        self.cache.delete(token)
        return json_response("Successful logout")

    def generate_auth_token(self, secret_key, public_key, user):
        serializer = Serializer(secret_key, expires_in=3600)
        token = serializer.dumps({'public_key': public_key})
        decoded_token = token.decode('ascii')
        self.cache.set(decoded_token, user, expire=3600)
        self.cache.close()
        return decoded_token

    def deserialize_auth_token(self, secret_key, token):
        token_status = self.cache.get(token)
        if token_status is None:
            raise BadSignature("")

        serializer = Serializer(secret_key)
        return serializer.loads(token)
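
The token helpers above pair an itsdangerous-style Serializer with a diskcache Cache, so logout can invalidate a token before its signature expires. A condensed sketch of that flow (the secret key, public key and user are illustrative):

handler = RouteHandler(loop=None, messenger=None, database=None)
token = handler.generate_auth_token('secret', 'pubkey123', {'voter_id': 'v1'})
handler.deserialize_auth_token('secret', token)   # -> {'public_key': 'pubkey123'}
handler.cache.delete(token)                       # logout
# any further deserialize_auth_token call now raises BadSignature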
Пример #23
0
    def search(
        self,
        name=None,
        ip=None,
        hexadecimal=None,
        type="ANY",
        bailiwick=None,
        wildcard_left=None,
        wildcard_right=None,
        inverse=False,
        sort=True,
        return_limit=10000,
        remote_limit=50000,
        epoch=False,
        time_first_before=None,
        time_first_after=None,
        time_last_before=None,
        time_last_after=None,
    ):
        """
        A method of the DNSDB Class to search the DNSDB API.

        :param name: string
            fully qualified domain name (one of name, ip or hexadecimal is required)
        :param ip: string
            IPv4 or IPv6 address, CIDR notation is valid
        :param hexadecimal: string
            hexadecimal digits specifying a raw octet string
        :param type: string (optional: default="ANY")
            dns resource record types (ANY, A, MX, SIG, etc)
        :param bailiwick: string (optional: default=None)
            a label in a fqdn, not valid for inverse queries
        :param wildcard_left: Boolean (optional: default=None)
            wildcard search to the left of a dot in a domain name
        :param wildcard_right: Boolean (optional: default=None)
            wildcard search to the right of a dot in a domain name
        :param inverse: boolean (optional: default=False)
            search for names resolving to names (e.g. MX, NS, CNAME, etc)
            only valid when used with name
        :param sort: boolean (optional: default=True)
        :param return_limit: integer (optional: default=10000)
        :param remote_limit: integer (optional: default=50000)
        :param epoch: boolean (optional: default=False)
        :param time_first_before:
        :param time_first_after:
        :param time_last_before:
        :param time_last_after:

        :return: Object
        """

        options = dict()

        options["name"] = name
        options["ip"] = ip
        options["hex"] = hexadecimal
        options["type"] = type
        options["bailiwick"] = bailiwick
        options["wildcard_left"] = wildcard_left
        options["wildcard_right"] = wildcard_right
        options["inverse"] = inverse
        options["sort"] = sort
        options["return_limit"] = return_limit
        options["remote_limit"] = remote_limit
        options["epoch"] = epoch
        options["time_first_before"] = time_first_before
        options["time_first_after"] = time_first_after
        options["time_last_before"] = time_last_before
        options["time_last_after"] = time_last_after
        options["api_key"] = self.api_key
        options["server"] = self.server
        options["cache"] = self.cache
        options["cache_location"] = self.cache_location
        options["cache_timeout"] = self.cache_timeout

        options = utils.pre_process(options)

        uri = utils.build_uri(options)

        if options["cache"] is True:
            cache = Cache(options["cache_location"])

            cached_result = cache.get(uri)

            if cached_result:
                data = json.loads(
                    gzip.decompress(cached_result).decode("utf-8"))
                results = Result(
                    records=data["records"],
                    status_code=data["status_code"],
                    error=data["error"],
                    quota=data["quota"],
                    cached=True,
                )
            else:
                results = _query(options, uri)
                if results.status_code in (200, 404):
                    compressed = Result.to_compressed(results)
                    cache.set(uri, compressed, expire=options["cache_timeout"])
        else:
            results = _query(options, uri)

        if results.status_code == 200:
            results = utils.post_process(options, results)

        return results
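
The search method above keys the disk cache on the full request URI and stores gzip-compressed JSON, so a repeated identical query is served locally. A condensed sketch (the constructor arguments are assumed; they are not shown in this snippet):

client = DNSDB(api_key='...', server='https://api.dnsdb.info',
               cache=True, cache_location='/tmp/dnsdb', cache_timeout=900)
first = client.search(name='example.com')    # network query, cached on 200/404
second = client.search(name='example.com')   # second.cached is True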
Пример #24
0
class CloudFS(Operations):
    '''Baidu netdisk filesystem'''
    def __init__(self, mainArgs, *args, **kw):
        logger.info(colored("- fuse 4 cloud driver -", 'red'))
        self.buffer = Cache('./cache/buffer-batchmeta')
        self.dir_buffer = Cache('./cache/dir_buffer-buffer-batchmeta')

        self.attr_requesting = Cache('./cache/attr-requesting')
        self.mainArgs = mainArgs

        self.traversed_folder = Cache('./cache/traversed-folder')
        self.disk = PCS(self.mainArgs)

        self.createLock = Lock()
        self.attrLock = Lock()

        self.writing_files = {}
        self.downloading_files = {}

        logger.info(f'mainArgs:{mainArgs}')

        q = json.loads(self.disk.quota())

        # only request once
        try:
            self.total_size = q['quota']
            self.used = q['used']
        except Exception as e:
            self.total_size = 100000000000
            self.used = 0
            logger.exception(e)
            logger.debug("can't load quota api, falling back to defaults")

        self.avail = self.total_size - self.used

        if mainArgs.debug:
            logger.setLevel(logging.DEBUG)
            logger.debug(colored("- debug mode -", 'red'))
            logger.debug(
                colored("- cache would not be the same after restart -", 'red'))
            self.buffer = Cache('./cache/buffer-batchmeta' + str(time.time()))
            self.dir_buffer = Cache('./cache/dir_buffer-buffer-batchmeta' +
                                    str(time.time()))
            self.traversed_folder = Cache('./cache/traversed-folder' +
                                          str(time.time()))
            self._readDirAsync("/", 2, dirReaderDaemon)
        else:
            logger.setLevel(logging.INFO)

            # update all folders in another thread
            self._readDirAsync("/", mainArgs.preload_level, dirReaderDaemon)

    @staticmethod
    def add_write_permission(st, permission='u'):
        """Add `w` (`write`) permission to specified targets."""
        mode_map = {
            'u': stat.S_IWUSR,
            'g': stat.S_IWGRP,
            'o': stat.S_IWOTH,
            'a': stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH,
        }
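        # e.g. permission='ug' ORs stat.S_IWUSR | stat.S_IWGRP into st['st_mode']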
        logger.debug(
            f'adding write permission {permission!r} to st_mode {st["st_mode"]:o}')
        for t in permission:
            st['st_mode'] |= mode_map[t]

        return st

    def _baidu_file_attr_convert(self, path, file_info):
        foo = fileAttr.copy()
        try:
            foo['st_ctime'] = file_info[
                'local_ctime'] if 'local_ctime' in file_info else file_info[
                    'ctime']
            foo['st_mtime'] = file_info[
                'local_mtime'] if 'local_mtime' in file_info else file_info[
                    'mtime']
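            # 16877 == 0o40755 (directory); 36279 == 0o106667 (regular file)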
            foo['st_mode'] = 16877 if file_info['isdir'] else 36279
            foo['st_nlink'] = 2 if file_info['isdir'] else 1
            foo['st_size'] = int(
                file_info['size']) if 'size' in file_info else 0
            self.buffer[path] = foo
        except Exception as e:
            logger.debug(f'add buffer error {e}, {path}: {file_info}')

    def _del_file_from_buffer(self, path):
        self.buffer.pop(path)

    def _getRootAttr(self):
        path = "/"
        if path in self.buffer:
            return self.buffer[path]

        logger.debug(f'net root: {path}')
        jdata = json.loads(self.disk.meta([path]))

        f = fileAttr.copy()
        f["st_mode"] = 16877
        f["st_nlink"] = 2
        if 'error_code' in jdata and jdata["error_code"] != 0:
            logger.debug(f"error_code:{jdata}")
            #             logger.info(f'{error_map[str(jdata["error_code"])]} args: {path}')
            self.buffer.set(path, f, expire=60)
            return f

        if "list" not in jdata or len(jdata["list"]) == 0:
            logger.debug(f"{path} not list :{jdata}")
            self.buffer.set(path, f, expire=60)
            return f

        file_info = jdata["list"][0]
        self._baidu_file_attr_convert(path, file_info)
        return file_info

    @funcLog
    def getattr(self, path, fh=None):
        '''
        Returns a dictionary with keys identical to the stat C structure of
        stat(2).

        st_atime, st_mtime and st_ctime should be floats.

        NOTE: There is an incompatibility between Linux and Mac OS X
        concerning st_nlink of directories. Mac OS X counts all files inside
        the directory, while Linux counts only the subdirectories.
        '''
        if path in self.writing_files:
            return self.writing_files[path]

        if path.split("/")[-1].startswith("."):
            raise FuseOSError(errno.ENOENT)

        # special handle root Attr
        if path == "/":
            return self._getRootAttr()

        parentDir = os.path.dirname(path)
        if parentDir not in self.dir_buffer:
            self._readDir(parentDir, 1)

        if path in self.buffer:
            return self.buffer[path]

        raise FuseOSError(errno.ENOENT)

    @funcLog
    def truncate(self, path, length, fh=None):
        self.unlink(path)
        self.create(path, None)
        self.writing_files[path]["uploading_tmp"].truncate(length)

    def _readDirAsync(self, path, depth, p):
        p.submit(self._readDir, path, depth, p)

    def _readDir(self, path, depth=2, threadPool=pool):
        if path not in self.traversed_folder:
            self.traversed_folder.set(path,
                                      b'1',
                                      expire=self.mainArgs.cache_timeout)
            logger.debug(f'net dir {depth} - {path} ')
            try:
                foo = json.loads(self.disk.list_files(path))

                files = ['.', '..']
                if 'error_code' in foo and foo["error_code"] != 0:
                    logger.info(
                        f'{error_map[str(foo["error_code"])]} args: {path}')
                if "list" not in foo:
                    return

                depth -= 1
                for file in foo['list']:
                    if file['server_filename'].startswith("."):
                        continue
                    files.append(file['server_filename'])
                    #                 logger.debug(f'{file}')
                    self._baidu_file_attr_convert(file['path'], file)
                    if depth > 0:
                        if file['isdir']:
                            self._readDirAsync(file['path'], depth, threadPool)
#                             self._readDir(file['path'],depth,threadPool)

                self.dir_buffer[path] = files

            except Exception as s:
                logger.exception(s)

    @funcLog
    def readdir(self, path, offset):
        self._readDirAsync(path, 1, pool)
        if path in self.dir_buffer:
            for r in self.dir_buffer[path]:
                yield r
        else:
            files = ['.', '..']
            for r in files:
                yield r

    # @funcLog
    def open(self, path, flags):
        if path in self.writing_files:
            return 0
        # no thread-race concern here: a given path is opened by a single thread
        try:
            if path not in self.downloading_files:
                url = self.disk.getRestUrl(path)
                x = Task(url, mainArgs, path, self.disk)
                x.start()
                self.downloading_files[path] = x
        except Baidu8Secs as e:
            logger.exception(e)
        except Exception as e:
            logger.exception(e)
        return 0

    def read(self, path, size, offset, fh):
        x = self.downloading_files[path]
        if x:
            data = x.get_cache(offset, size)

            filename = path[path.rfind("/") + 1:]
            if filename.startswith("enc."):
                if offset == 0:
                    if data and len(data) > encrpted_length:
                        data = bytes(
                            cipher(data, 0, encrpted_length,
                                   self.mainArgs.key))
                    else:
                        print("decrpt failed!")
            return data

        raise FuseOSError(errno.EIO)

    def updateCache(self, path, newValue):
        '''
        add     updateCache(path, value)
        delete  updateCache(path, None)
        update  updateCache(path, value)
        '''
        pass

    def updateCacheKeyOnly(self, old, new):
        '''
        delete     updateCacheKeyOnly(old, None)
        add/update updateCacheKeyOnly(old, new)
        '''
        try:
            old_parent_dir = os.path.dirname(old)
            old_name = os.path.basename(old)
            if not new:
                oldCache = self.dir_buffer.get(old_parent_dir)
                # remove
                if oldCache:
                    if old_name in oldCache:
                        oldCache.remove(old_name)
                        self.dir_buffer[old_parent_dir] = oldCache
                    if old in self.buffer:
                        self.buffer.pop(old)
                else:
                    pass
            else:
                print("updateCache", old, new)
                oldCache = self.dir_buffer[old_parent_dir]
                new_parent_dir = os.path.dirname(new)
                if old_name in oldCache:
                    # dir old remove
                    oldCache.remove(old_name)
                    self.dir_buffer[old_parent_dir] = oldCache
                    # dir new add
                    newfilename = new[new.rfind("/") + 1:]
                    newCache = self.dir_buffer.get(new_parent_dir, [])
                    newCache.append(newfilename)
                    self.dir_buffer[new_parent_dir] = newCache

                if old in self.buffer:
                    old_info = self.buffer.pop(old)
                    self.buffer[new] = old_info
        except Exception as e:
            logger.info(e)

    def updateDir(self, old, new):
        pass

    def unlink(self, path):
        '''
        deletes files only
        '''
        logger.debug(f'unlink {path}')

        self.disk.delete([path])
        self.updateCacheKeyOnly(path, None)

    def rmdir(self, path):
        '''
        deletes directories only
        '''

        self.disk.delete([path])
        self.updateCacheKeyOnly(path, None)

    def access(self, path, amode):
        return 0

    def rename(self, old, new):
        '''
        affects both directories and files
        '''
        logger.info(f'rename {old}, {new}')
        self.disk.rename(old, new)
        self.updateCacheKeyOnly(old, new)

    @funcLog
    def mkdir(self, path, mode):
        logger.info(f'making dir {path}')

        r = json.loads(self.disk.mkdir(path))

        if 'error_code' in r:
            logger.info(f'{r}')
            logger.info(
                f'{error_map[str(r["error_code"])]} args: {path}, response:{r}'
            )
            return

        directory = path[:path.rfind("/")]
        filename = path[path.rfind("/") + 1:]

        cache = None
        if directory in self.dir_buffer:
            cache = self.dir_buffer[directory]
            cache.append(filename)
        self.dir_buffer[directory] = cache

        self._baidu_file_attr_convert(path, r)

    @funcLog
    def create(self, path, mode, fh=None):
        logger.debug(f'create {path}')
        with self.createLock:
            if path not in self.writing_files:
                attr = fileAttr.copy()
                attr['uploading_tmp'] = tempfile.NamedTemporaryFile('wb')
                attr['st_mode'] = (attr['st_mode'] | stat.S_IFREG
                                   | stat.S_ISUID | stat.S_ISGID)

                self.writing_files[path] = attr
            else:
                logger.debug(f'{path} is already being written to by another request')
        return 0

    def flush(self, path, fh):
        with self.createLock:
            if path in self.writing_files:
                self.writing_files[path]["uploading_tmp"].flush()
        return 0

    def release(self, path, fh):
        with self.createLock:
            if path in self.writing_files:
                uploading_tmp = self.writing_files[path]['uploading_tmp']
                r = json.loads(self.disk.upload(uploading_tmp.name, path))
                logger.info(f'upload response: {r}')

                self.writing_files[path]['uploading_tmp'].close()
                #                 if path in self.buffer:
                #                     del self.buffer[path]

                if path in self.writing_files:
                    del self.writing_files[path]

                # prevent accidentally reading the file while the upload is still in progress
                if path in self.downloading_files:
                    del self.downloading_files[path]

                # update the file's cached attributes from the upload response
                self._baidu_file_attr_convert(path, r)

                # update parent dir
                parentDir = os.path.dirname(path)
                filename = path[path.rfind("/") + 1:]

                if parentDir in self.dir_buffer:
                    parentDirCache = self.dir_buffer[parentDir]
                    parentDirCache.append(filename)
                    self.dir_buffer[parentDir] = parentDirCache
                    logger.info(f'{self.dir_buffer[parentDir]}')

                print("released", path)
                return
        # no thread-race concern here: a given path is released by a single thread
        if path in self.downloading_files:
            #             self.downloading_files[path].terminate()
            #             del self.downloading_files[path]
            #             uploading_tmp = "./uploading_tmp"+path
            #             logger.info("delete uploading_tmp:", uploading_tmp)
            #             os.remove(uploading_tmp)
            pass

    def write(self, path, data, offset, fp):

        filename = path[path.rfind("/") + 1:]
        if filename.startswith("enc."):
            if offset == 0 and data and len(data) > encrpted_length:
                data = bytes(
                    cipher(data, 0, encrpted_length, self.mainArgs.key))

        length = len(data)
        self.writing_files[path]["st_size"] += length
        self.writing_files[path]["uploading_tmp"].write(data)

        return length

    def chmod(self, path, mode):
        pass

    def statfs(self, path):

        # TODO read from cloud disk
        return {
            'f_bavail': int(self.avail / 4096),
            'f_bfree': int(self.avail / 4096),  # same value (free blocks)
            'f_favail': 4290675908,
            'f_ffree': 4290675908,  # same value (free inodes)
            'f_bsize': 104857,  # preferred I/O block size
            'f_blocks': int(self.total_size / 8),
            'f_files': 4294967279,
            'f_flag': 0,
            'f_frsize': 4096,
            'f_namemax': 255
        }
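
For context, an operations class like the one above is normally handed to fusepy's FUSE constructor to mount the filesystem. This is only a sketch: the class name CloudFS, its constructor arguments and the mountpoint are assumptions, since the opening of the class is not part of this excerpt.

# hypothetical mount script (fusepy's FUSE class is real; CloudFS is assumed)
from fuse import FUSE

fs = CloudFS()  # assumed name and constructor for the operations class above
FUSE(fs, '/mnt/clouddisk', foreground=True)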
Example #25
class ReadCacheDataBackend(DataBackend):
    def __init__(self, config):
        read_cache_directory = config.get('dataBackend.readCache.directory',
                                          None,
                                          types=str)
        read_cache_maximum_size = config.get(
            'dataBackend.readCache.maximumSize', None, types=int)

        if bool(read_cache_directory) != bool(read_cache_maximum_size):
            raise ConfigurationError(
                'Both dataBackend.readCache.directory and dataBackend.readCache.maximumSize need to be set '
                + 'to enable disk based caching.')

        if read_cache_directory and read_cache_maximum_size:
            os.makedirs(read_cache_directory, exist_ok=True)
            try:
                self._read_cache = Cache(
                    read_cache_directory,
                    size_limit=read_cache_maximum_size,
                    eviction_policy='least-frequently-used',
                    statistics=1,
                )
            except Exception:
                logger.warning(
                    'Unable to enable disk based read caching. Continuing without it.'
                )
                self._read_cache = None
            else:
                logger.debug(
                    'Disk based read caching instantiated (cache size {}).'.
                    format(read_cache_maximum_size))
        else:
            self._read_cache = None
        self._use_read_cache = True

        # Start reader and writer threads after the disk cache is created, so that they see it.
        super().__init__(config)

    def _read(self, block, metadata_only):
        key = self._block_uid_to_key(block.uid)
        metadata_key = key + self._META_SUFFIX
        if self._read_cache is not None and self._use_read_cache:
            metadata = self._read_cache.get(metadata_key)
            if metadata and metadata_only:
                return block, None, metadata
            elif metadata:
                data = self._read_cache.get(key)
                if data:
                    return block, data, metadata

        block, data, metadata = super()._read(block, metadata_only)

        # We always put blocks into the cache even when self._use_read_cache is False
        if self._read_cache is not None:
            self._read_cache.set(metadata_key, metadata)
            if not metadata_only:
                self._read_cache.set(key, data)

        return block, data, metadata

    def use_read_cache(self, enable):
        old_value = self._use_read_cache
        self._use_read_cache = enable
        return old_value

    def close(self):
        super().close()
        if self._read_cache is not None:
            (cache_hits, cache_misses) = self._read_cache.stats()
            logger.debug(
                'Disk based cache statistics (since cache creation): {} hits, {} misses.'
                .format(cache_hits, cache_misses))
            self._read_cache.close()
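
A brief usage sketch for the toggle above: use_read_cache(False) only disables cache lookups, while _read() keeps populating the cache, so later reads can still hit it. The config object below is assumed to be whatever DataBackend expects; it is not shown in this excerpt.

backend = ReadCacheDataBackend(config)    # config is an assumption
previous = backend.use_read_cache(False)  # bypass lookups, keep populating
...  # perform reads through the backend here
backend.use_read_cache(previous)  # restore the previous setting
backend.close()  # logs hit/miss statistics when the cache is enabled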
Example #26
#!/usr/bin/env python
# coding: utf8


from diskcache import Cache
cache = Cache('/tmp/fzquant')

cache.set('abc', 'def')
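
To round the snippet out, reading the value back, setting an expiring entry and deleting a key look like this (all three calls are part of the documented diskcache Cache API):

assert cache.get('abc') == 'def'    # plain lookup; returns None when missing
cache.set('ghi', 'jkl', expire=60)  # entry is evicted after 60 seconds
cache.delete('abc')                 # explicit removal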
Example #27
class TorProjectClient:
    """Simple client to query torproject.org for exit nodes.

    The client will download https://check.torproject.org/exit-addresses
    and check if a specified IP address is present in it. If that IP address
    is found it will check for its last update time and return a description
    of the node if its last update time is less than `ttl` seconds ago.

    :param ttl: Tor node will be kept only if its last update was
                less than `ttl` seconds ago. Ignored if `ttl` is 0
    :param cache_duration: Duration before refreshing the cache (in seconds).
                           Ignored if `cache_duration` is 0.
    :param cache_root: Path where to store the cached file
                       downloaded from torproject.org
    :type ttl: int
    :type cache_duration: int
    :type cache_root: str
    """
    def __init__(self, ttl=86400, cache_duration=3600,
                 cache_root='/tmp/cortex/tor_project'):
        self.session = requests.Session()
        self.delta = None
        self.cache = None
        if ttl > 0:
            self.delta = timedelta(seconds=ttl)
        if cache_duration > 0:
            self.cache = Cache(cache_root)
            self.cache_duration = cache_duration
        self.url = 'https://check.torproject.org/exit-addresses'

    __cache_key = __name__ + ':raw_data'

    def _get_raw_data(self):
        try:
            return self.cache['raw_data']
        except(AttributeError, TypeError):
            return self.session.get(self.url).text
        except KeyError:
            self.cache.set(
                'raw_data',
                self.session.get(self.url).text,
                expire=self.cache_duration)
            return self.cache['raw_data']

    def search_tor_node(self, ip):
        """Lookup an IP address to check if it is a known tor exit node.

        :param ip: The IP address to lookup
        :type ip: str
        :return: Data relative to the tor node. If `ip` is a tor exit node
                 it will contain a `node` key with the hash of the node and
                 a `last_status` key with the last update time of the node.
                 If `ip` is not a tor exit node, the function will return an
                 empty dictionary.
        :rtype: dict
        """
        data = {}
        tmp = {}
        present = datetime.utcnow().replace(tzinfo=pytz.utc)
        for line in self._get_raw_data().splitlines():
            params = line.split(' ')
            if params[0] == 'ExitNode':
                tmp['node'] = params[1]
            elif params[0] == 'ExitAddress':
                tmp['last_status'] = params[2] + 'T' + params[3] + '+0000'
                last_status = parse(tmp['last_status'])
                if (self.delta is None or
                   (present - last_status) < self.delta):
                    data[params[1]] = tmp
                tmp = {}
            else:
                pass
        return data.get(ip, {})
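
A minimal usage sketch for the client above; the IP address is a placeholder from the documentation range:

client = TorProjectClient(ttl=86400, cache_duration=3600)
node = client.search_tor_node('203.0.113.7')
if node:
    print(node['node'], node['last_status'])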
Example #28
class TorBlutmagieClient:
    """Simple client to query torstatus.blutmagie.de for exit nodes.

    The client will download http://torstatus.blutmagie.de/query_export.php/Tor_query_EXPORT.csv
    and check if a specified IP address, FQDN or domain is present in it.
    It will cache the response for `cache_duration` seconds to avoid
    too much latency.

    :param cache_duration: Duration before refreshing the cache (in seconds).
                           Ignored if `cache_duration` is 0.
    :param cache_root: Path where to store the cached file
                       downloaded from torstatus.blutmagie.de
    :type cache_duration: int
    :type cache_root: str
    """
    def __init__(self, cache_duration=3600, cache_root='/tmp/cortex/tor_project'):
        self.session = requests.Session()
        self.cache_duration = cache_duration
        if self.cache_duration > 0:
            self.cache = Cache(cache_root)
        self.url = 'http://torstatus.blutmagie.de/query_export.php/Tor_query_EXPORT.csv'

    __cache_key = __name__ + ':raw_data'

    def _get_raw_data(self):
        try:
            return self.cache[self.__cache_key]
        except (AttributeError, TypeError):
            return self.session.get(self.url).text.encode('utf-8')
        except KeyError:
            self.cache.set(
                self.__cache_key,
                self.session.get(self.url).text.encode('utf-8'),
                expire=self.cache_duration, read=True)
            return self.cache[self.__cache_key]

    def _get_data(self):
        return csv.DictReader(
            self._get_raw_data().decode('utf-8').splitlines(),
            delimiter=',')

    def _extract_fields(self, line):
        return {
            'hostname': line['Hostname'],
            'name': line['Router Name'],
            'country_code': line['Country Code'],
            'ip': line['IP Address'],
            'as_name': line['ASName'],
            'as_number': line['ASNumber']
        }

    def _get_node_from_domain(self, domain):
        results = []
        for line in self._get_data():
            if domain.lower() in line['Hostname'].lower():
                results.append(self._extract_fields(line))
        return results

    def _get_node_from_fqdn(self, fqdn):
        results = []
        for line in self._get_data():
            if fqdn.lower() == line['Hostname'].lower():
                results.append(self._extract_fields(line))
                break
        return results

    def _get_node_from_ip(self, ip):
        results = []
        for line in self._get_data():
            if ip == line['IP Address']:
                results.append(self._extract_fields(line))
                break
        return results

    def search_tor_node(self, data_type, data):
        """Lookup an artifact to check if it is a known tor exit node.

        :param data_type: The artifact type. Must be one of 'ip', 'fqdn'
                          or 'domain'
        :param data: The artifact to lookup
        :type data_type: str
        :type data: str
        :return: Data relative to the tor node. If the looked-up artifact is
                 related to a tor exit node the result will contain a `nodes`
                 array with one entry per matching node, each holding the
                 following keys:
                 - name: name given to the router
                 - ip: its IP address
                 - hostname: hostname of the router
                 - country_code: ISO2 code of the country hosting the router
                 - as_name: ASName registering the router
                 - as_number: ASNumber registering the router
                 Otherwise, `nodes` will be empty.
        :rtype: dict
        """
        results = []
        if data_type == 'ip':
            results = self._get_node_from_ip(data)
        elif data_type == 'fqdn':
            results = self._get_node_from_fqdn(data)
        elif data_type == 'domain':
            results = self._get_node_from_domain(data)
        else:
            pass
        return {"nodes": results}
Example #29
class AzureBlobTickstore(Tickstore):
    """
    Tickstore meant to run against Microsoft's Azure Blob Storage backend, e.g. for archiving purposes. Note this is
    not suitable for concurrent access to the blob because the index is loaded into memory on the local node and only
    written back to the blob on close. We may want to implement blob locking to at least prevent accidents.
    """

    logger = logging.getLogger(__name__)

    def __init__(self,
                 connect_str: str,
                 db_name: str,
                 cache_dir: Path = Path('/var/tmp/abs_lru_cache'),
                 timestamp_column: str = 'date'):
        self.storage = BlobServiceClient.from_connection_string(connect_str)
        self.container_name = db_name.replace('_', '-').lower()
        self.container_client = self.storage.get_container_client(
            self.container_name)
        self.cache_dir = cache_dir
        self.cache = Cache(str(self.cache_dir))
        self.index_path = self.cache_dir.joinpath(Path('index.h5'))
        self.timestamp_column = timestamp_column
        self.closed = False

        # try and create the container
        try:
            self.storage.create_container(self.container_name)
        except ResourceExistsError:
            self.logger.info(f'container {self.container_name} already exists')

        try:
            # fetch index from Azure
            blob_client = self.storage.get_blob_client(
                container=self.container_name, blob='index')
            index_blob = blob_client.download_blob()

            # write index into local cache dir
            with open(self.index_path, 'wb') as index_file:
                index_file.write(index_blob.readall())

            self.index = DataFrameIndex(Path(f'azure:{db_name}/'),
                                        Path(self.index_path), db_name)
        except ResourceNotFoundError:
            # clear out any previously-downloaded index first
            if self.index_path.exists():
                self.index_path.unlink()

            # create new, empty index on local disk (we'll upload this to Azure when we flush())
            self.index = DataFrameIndex(Path(f'azure:{db_name}/'),
                                        Path(self.index_path), db_name)

    # noinspection DuplicatedCode
    def select(
        self,
        symbol: str,
        start: datetime.datetime,
        end: datetime.datetime,
        as_of_time: datetime.datetime = BiTimestamp.latest_as_of
    ) -> pd.DataFrame:
        self._check_closed('select')

        # pass 1: grab the list of splays matching the start / end range that are valid for as_of_time
        selected = self.index.select(symbol, start.date(), end.date(),
                                     as_of_time)
        if selected.empty:
            return selected

        # load all ticks in range into memory
        loaded_dfs = []
        for index, row in selected.iterrows():
            logical_path = row['path']
            loaded_dfs.append(self.read(logical_path))

        # pass 2: select ticks matching the exact start/end timestamps
        # noinspection PyTypeChecker
        all_ticks = pd.concat(loaded_dfs)
        time_mask = (all_ticks.index.get_level_values(self.timestamp_column) >= start) \
            & (all_ticks.index.get_level_values(self.timestamp_column) <= end)

        # sort the ticks -- probably need to optimize this to sort on paths and sort ticks on ingest
        selected_ticks = all_ticks.loc[time_mask]
        selected_ticks.sort_index(inplace=True)
        return selected_ticks

    # noinspection PyTypeChecker
    def read(self, logical_path: str) -> pd.DataFrame:
        logical_prefix = f'azure:{self.container_name}'
        if logical_path.startswith(logical_prefix):
            blob_path = logical_path[len(logical_prefix) + 1:]
        else:
            # correction for bad data in index history
            blob_path = logical_path
            logical_path = f'{logical_prefix}/{logical_path}'

        cached_data = self.cache.get(logical_path)
        if cached_data is not None:
            self.logger.info(f'reading ticks from cache: {logical_path}')
            return pd.read_parquet(io.BytesIO(cached_data))
        else:
            blob_client = self.storage.get_blob_client(
                container=self.container_name, blob=blob_path)
            tick_blob = blob_client.download_blob()
            tick_data = tick_blob.readall()
            self.cache.set(logical_path, tick_data)
            return pd.read_parquet(io.BytesIO(tick_data))

    # noinspection PyArgumentList
    def insert(self, symbol: str, ts: BiTimestamp, ticks: pd.DataFrame):
        self._check_closed('insert')

        # encode DataFrame as parquet byte stream
        table = pyarrow.Table.from_pandas(df=ticks)
        buf = pyarrow.BufferOutputStream()
        pyarrow.parquet.write_table(table, buf)

        # determine latest version number
        # compose a splay path based on YYYY/MM/DD, symbol and version and pass in as a functor
        # so it can be populated with the bitemporal version
        as_at_date = ts.as_at()
        logical_prefix = f'azure:{self.container_name}'

        def create_write_path(version: int):
            path = f'{as_at_date.year}/{as_at_date.month:02d}/{as_at_date.day:02d}/{symbol}_{version:04d}.h5'
            return f'{logical_prefix}/{path}'

        # insert into local copy of index
        data_path = self.index.insert(symbol, as_at_date, create_write_path)
        base_path = data_path[len(logical_prefix) + 1:]

        # update cache
        data_bytes = bytes(buf.getvalue())
        self.cache.set(data_path, data_bytes)

        # upload to Azure
        self.logger.info(f'uploading to Azure: {base_path}')
        blob_client = self.storage.get_blob_client(
            container=self.container_name, blob=str(base_path))
        try:
            blob_client.upload_blob(io.BytesIO(data_bytes), overwrite=False)
        except ResourceExistsError:
            self.logger.info(f'skipping upload of existing blob: {data_path}')

    def delete(self, symbol: str, ts: BiTimestamp):
        self._check_closed('delete')
        self.index.delete(symbol, ts.as_at())

    def flush(self):
        # sync index to disk
        self.index.flush()

        # upload index to Azure blob storage
        blob_client = self.storage.get_blob_client(
            container=self.container_name, blob='index')
        with open(self.index_path, 'rb') as index_data:
            blob_client.upload_blob(index_data, overwrite=True)

    def close(self):
        if not self.closed:
            self.flush()
            self.closed = True

    def _check_closed(self, operation):
        if self.closed:
            raise Exception('unable to perform operation while closed: ' +
                            operation)

    def destroy(self):
        pass
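
A usage sketch for the tickstore above. The connection string and symbol are placeholders, and close() matters: it flushes the in-memory index back to the blob, which is why the class is unsafe for concurrent writers.

import datetime

store = AzureBlobTickstore(connect_str='<azure-connection-string>',  # placeholder
                           db_name='ticks_demo')
df = store.select('AAPL',
                  start=datetime.datetime(2021, 1, 1),
                  end=datetime.datetime(2021, 1, 31))
store.close()  # flushes and uploads the index; the store is unusable afterwards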
Example #30
class TorBlutmagieClient:
    """Simple client to query torstatus.blutmagie.de for exit nodes.

    The client will download http://torstatus.blutmagie.de/query_export.php/Tor_query_EXPORT.csv
    and check if a specified IP address, FQDN or domain is present in it.
    It will cache the response for `cache_duration` seconds to avoid
    too much latency.

    :param cache_duration: Duration before refreshing the cache (in seconds).
                           Ignored if `cache_duration` is 0.
    :param cache_root: Path where to store the cached file
                       downloaded from torstatus.blutmagie.de
    :type cache_duration: int
    :type cache_root: str
    """
    def __init__(self,
                 cache_duration=3600,
                 cache_root='/tmp/cortex/tor_project'):
        self.session = requests.Session()
        self.cache_duration = cache_duration
        if self.cache_duration > 0:
            self.cache = Cache(cache_root)
        self.url = 'http://torstatus.blutmagie.de/query_export.php/Tor_query_EXPORT.csv'

    __cache_key = __name__ + ':raw_data'

    def _get_raw_data(self):
        try:
            return self.cache[self.__cache_key]
        except (AttributeError, TypeError):
            return self.session.get(self.url).text.encode('utf-8')
        except KeyError:
            self.cache.set(self.__cache_key,
                           self.session.get(self.url).text.encode('utf-8'),
                           expire=self.cache_duration,
                           read=True)
            return self.cache[self.__cache_key]

    def _get_data(self):
        return csv.DictReader(
            self._get_raw_data().decode('utf-8').splitlines(), delimiter=',')

    def _extract_fields(self, line):
        return {
            'hostname': line['Hostname'],
            'name': line['Router Name'],
            'country_code': line['Country Code'],
            'ip': line['IP Address'],
            'as_name': line['ASName'],
            'as_number': line['ASNumber']
        }

    def _get_node_from_domain(self, domain):
        results = []
        for line in self._get_data():
            if domain.lower() in line['Hostname'].lower():
                results.append(self._extract_fields(line))
        return results

    def _get_node_from_fqdn(self, fqdn):
        results = []
        for line in self._get_data():
            if fqdn.lower() == line['Hostname'].lower():
                results.append(self._extract_fields(line))
                break
        return results

    def _get_node_from_ip(self, ip):
        results = []
        for line in self._get_data():
            if ip == line['IP Address']:
                results.append(self._extract_fields(line))
                break
        return results

    def search_tor_node(self, data_type, data):
        """Lookup an artifact to check if it is a known tor exit node.

        :param data_type: The artifact type. Must be one of 'ip', 'fqdn'
                          or 'domain'
        :param data: The artifact to lookup
        :type data_type: str
        :type data: str
        :return: Data relative to the tor node. If the looked-up artifact is
                 related to a tor exit node the result will contain a `nodes`
                 array with one entry per matching node, each holding the
                 following keys:
                 - name: name given to the router
                 - ip: its IP address
                 - hostname: hostname of the router
                 - country_code: ISO2 code of the country hosting the router
                 - as_name: ASName registering the router
                 - as_number: ASNumber registering the router
                 Otherwise, `nodes` will be empty.
        :rtype: dict
        """
        results = []
        if data_type == 'ip':
            results = self._get_node_from_ip(data)
        elif data_type == 'fqdn':
            results = self._get_node_from_fqdn(data)
        elif data_type == 'domain':
            results = self._get_node_from_domain(data)
        else:
            pass
        return {"nodes": results}
Example #31
class LookupService(ABC):
    """
    Abstract model for a lookup service.
    """
    def __init__(self, config: LookupServiceConfig):
        self._config = config
        self._cache = Cache(
            os.path.join(os.path.dirname(__file__), '.cache',
                         self.__class__.__name__, self._config.cache_dir()))

    @property
    def max_hits(self):
        return self._config.max_hits

    def _update_cache(self, results: List[LookupResult]):
        """
        Update cache entries with new results
        :param results: a list of LookupResults
        :return:
        """
        for lookup_result in results:
            self._cache.set(lookup_result.label,
                            lookup_result)  # ALWAYS override!

    def _get_cached_entries(
            self, labels: List[str]) -> Tuple[List[LookupResult], List[str]]:
        """
        Return a tuple (<cached results>, <labels to lookup>)
        :param labels: a list of strings to lookup
        :return:
        """
        to_compute = []
        cached_entries = []

        for label in labels:
            entry = self._cache.get(label)
            if entry is None:
                to_compute.append(label)
            else:
                cached_entries.append(entry)

        return cached_entries, to_compute

    def _lookup(self, labels: List[str]) -> List[LookupResult]:
        """
        Actual lookup function. To be implemented by all the subclasses.
        :param labels:
        :return:
        """
        raise NotImplementedError

    def lookup(self, labels: List[str]) -> List[LookupResult]:
        """
        Returns a list of LookupResult, based on the given labels.
        Results are fetched from a local cache, if enabled. Newly computed
        results are always written back to the cache, overwriting any
        existing entries for the same labels.
        :param labels: a list of strings to lookup
        :return: a list of LookupResult
        """
        cached_entries, to_compute = [], labels

        if self._config.enable_cache:
            cached_entries, to_compute = self._get_cached_entries(to_compute)

        new_results = self._lookup(to_compute)
        self._update_cache(new_results)  # write new entries to cache

        return cached_entries + new_results

    def lookup_subsequences(self, labels: List[str],
                            max_subseq_len: int) -> List[LookupResult]:
        """
        Given a label, the method searches for all its subsequences (from length max_subseq_len to 1)
        Results are then aggregated, keeping the original rank.
        :param labels: a list of labels
        :param max_subseq_len: length of the longest subsequence to compute
        :return: a list of LookupResult
        """
        subsequences_dict, subsequences_set = strings_subsequences(
            labels, max_subseq_len)
        lookup_results = dict(self.lookup(list(subsequences_set)))
        results = []
        for label in labels:  # aggregate subsequences results for each label
            label_results = []
            for subsequence in subsequences_dict[label]:
                label_results += lookup_results[subsequence]
            results.append(LookupResult(label, label_results))
        return results
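
Since _lookup() raises NotImplementedError, concrete services subclass LookupService and implement only that method. A minimal sketch, assuming LookupResult behaves like a (label, candidates) pair as the dict() conversion in lookup_subsequences suggests:

class DummyLookupService(LookupService):
    """Toy service that finds nothing; caching still works unchanged."""

    def _lookup(self, labels: List[str]) -> List[LookupResult]:
        # hypothetical: an empty candidate list for every label
        return [LookupResult(label, []) for label in labels]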
Example #32
class URLhaus:
    """Simple client to query URLhaus by abuse.ch.
    :param query: domain, url or hash.
    :param cache_duration: Duration before refreshing the cache (in seconds).
                           Ignored if `cache_duration` is 0.
    :param cache_root: Path where to store the cached file.
    :type query: string
    :type cache_duration: int
    :type cache_root: str
    """
    def __init__(self,
                 query,
                 cache_duration=300,
                 cache_root="/tmp/cortex/URLhaus"):
        self.URL = "https://urlhaus.abuse.ch/browse.php"
        self.query = query
        self.cache = None
        if cache_duration > 0:
            self.cache = Cache(cache_root)
            self.cache_duration = cache_duration

    def _get_raw_data(self):
        try:
            return self.cache[self.query.encode('utf-8')]
        except (AttributeError, TypeError):
            return self.fetch()
        except KeyError:
            self.cache.set(self.query.encode('utf-8'),
                           self.fetch(),
                           expire=self.cache_duration)
            return self.cache[self.query.encode('utf-8')]

    def search(self):
        res = self._get_raw_data()
        return self.parse(res)

    def fetch(self):
        payload = {"search": self.query}
        return requests.get(self.URL, params=payload).text

    def parse(self, doc):
        results = []
        soup = BeautifulSoup(doc, "html.parser")
        table = soup.find("table", class_="table")
        rows = table.find_all("tr")[1:]
        for row in rows:
            cols = row.find_all("td")
            results.append({
                "dateadded": cols[0].text,
                "malware_url": cols[1].text,
                "link": cols[1].find("a").attrs.get("href"),
                "status": cols[2].text,
                "tags": cols[3].text.split(),
                "gsb": cols[4].text if len(cols) > 5 else None,
                "reporter": cols[5].text if len(cols) > 5 else cols[4].text
            })
        return results
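
A short usage sketch for the client above; the query value is a placeholder:

client = URLhaus('example.org', cache_duration=300)
for entry in client.search():
    print(entry['dateadded'], entry['malware_url'], entry['status'])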
Example #33
class TorProjectClient:
    """Simple client to query torproject.org for exit nodes.

    The client will download https://check.torproject.org/exit-addresses
    and check if a specified IP address is present in it. If that IP address
    is found it will check for its last update time and return a description
    of the node if its last update time is less than `ttl` seconds ago.

    :param ttl: Tor node will be kept only if its last update was
                less than `ttl` seconds ago. Ignored if `ttl` is 0
    :param cache_duration: Duration before refreshing the cache (in seconds).
                           Ignored if `cache_duration` is 0.
    :param cache_root: Path where to store the cached file
                       downloaded from torproject.org
    :param proxies: Proxies to be using during requests session
    :type ttl: int
    :type cache_duration: int
    :type cache_root: str
    :type proxies: dict
    """

    def __init__(
        self,
        ttl=86400,
        cache_duration=3600,
        cache_root="/tmp/cortex/tor_project",
        proxies=None,
    ):
        self.session = requests.Session()
        if proxies:
            self.session.proxies.update(proxies)
        self.delta = None
        self.cache = None
        if ttl > 0:
            self.delta = timedelta(seconds=ttl)
        if cache_duration > 0:
            self.cache = Cache(cache_root)
            self.cache_duration = cache_duration
        self.url = "https://check.torproject.org/exit-addresses"

    __cache_key = __name__ + ":raw_data"

    def _get_raw_data(self):
        try:
            return self.cache["raw_data"]
        except (AttributeError, TypeError):
            return self.session.get(self.url).text
        except KeyError:
            self.cache.set(
                "raw_data",
                self.session.get(self.url).text,
                expire=self.cache_duration,
            )
            return self.cache["raw_data"]

    def search_tor_node(self, ip):
        """Lookup an IP address to check if it is a known tor exit node.

        :param ip: The IP address to lookup
        :type ip: str
        :return: Data relative to the tor node. If `ip` is a tor exit node
                 it will contain a `node` key with the hash of the node and
                 a `last_status` key with the last update time of the node.
                 If `ip` is not a tor exit node, the function will return an
                 empty dictionary.
        :rtype: dict
        """
        data = {}
        tmp = {}
        present = datetime.utcnow().replace(tzinfo=pytz.utc)
        for line in self._get_raw_data().splitlines():
            params = line.split(" ")
            if params[0] == "ExitNode":
                tmp["node"] = params[1]
            elif params[0] == "ExitAddress":
                tmp["last_status"] = params[2] + "T" + params[3] + "+0000"
                last_status = parse(tmp["last_status"])
                if self.delta is None or (present - last_status) < self.delta:
                    data[params[1]] = tmp
                tmp = {}
            else:
                pass
        return data.get(ip, {})
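
This variant differs from the earlier TorProjectClient only in accepting a proxies mapping, which is applied to the underlying requests.Session; a sketch with placeholder proxy endpoints:

proxies = {'http': 'http://127.0.0.1:8080',
           'https': 'http://127.0.0.1:8080'}  # placeholders
client = TorProjectClient(proxies=proxies)
node = client.search_tor_node('203.0.113.7')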
Example #34
import logging
import os
import pathlib

default_flask_logger = logging.getLogger('werkzeug')
default_flask_logger.setLevel(logging.CRITICAL)

from publicsuffixlist import PublicSuffixList
from domain_stats.config import Config
from domain_stats.network_io import IscConnection
import domain_stats.rdap_query as rdap
from domain_stats.freq import FreqCounter
from domain_stats.expiring_diskcache import ExpiringCache
from diskcache import Cache

# the current directory must be set by the launcher to the location of the config and database
cache = ExpiringCache(os.getcwd())
memocache = Cache(pathlib.Path().cwd() / "memocache")
if not memocache.get("rdap_good"):
    memocache.set("rdap_good", 0)
    memocache.set("rdap_fail", 0)
config = Config(os.getcwd() + "/domain_stats.yaml")

if config.get("enable_freq_scores"):
    freq = FreqCounter()
    freq.load(config.get("freq_table"))

log = logging.getLogger("domain_stats")  # assumed: `log` is used below but never defined in this fragment
logfile = logging.FileHandler(str(pathlib.Path.cwd() / 'domain_stats.log'))
logformat = logging.Formatter(
    '%(asctime)s : %(levelname)s : %(module)s : %(process)d : %(message)s')
logfile.setFormatter(logformat)
if config['log_detail'] == 0:
    log.setLevel(level=logging.CRITICAL)
elif config['log_detail'] == 1:
    log.addHandler(logfile)
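
The rdap_good/rdap_fail keys above are plain diskcache counters; elsewhere the application would presumably bump them with Cache.incr, which is atomic across threads and processes:

memocache.incr("rdap_good")            # atomic +1
failures = memocache.get("rdap_fail")  # read the current counter value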
Example #35
class CacheInteraction:
    def __init__(self, dimension=DimensionType.DIM_2D):
        self._cache = Cache(settings.CACHE_ROOT)
        self._dimension = dimension

    def __del__(self):
        self._cache.close()

    def get_buff_mime(self, chunk_number, quality, db_data):
        chunk, tag = self._cache.get('{}_{}_{}'.format(db_data.id,
                                                       chunk_number, quality),
                                     tag=True)

        if not chunk:
            chunk, tag = self.prepare_chunk_buff(db_data, quality,
                                                 chunk_number)
            self.save_chunk(db_data.id, chunk_number, quality, chunk, tag)
        return chunk, tag

    def prepare_chunk_buff(self, db_data, quality, chunk_number):
        from cvat.apps.engine.frame_provider import FrameProvider  # TODO: remove circular dependency
        writer_classes = {
            FrameProvider.Quality.COMPRESSED: (
                Mpeg4CompressedChunkWriter
                if db_data.compressed_chunk_type == DataChoice.VIDEO
                else ZipCompressedChunkWriter),
            FrameProvider.Quality.ORIGINAL: (
                Mpeg4ChunkWriter
                if db_data.original_chunk_type == DataChoice.VIDEO
                else ZipChunkWriter),
        }

        image_quality = (100 if writer_classes[quality] in
                         [Mpeg4ChunkWriter, ZipChunkWriter]
                         else db_data.image_quality)
        mime_type = ('video/mp4' if writer_classes[quality] in
                     [Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter]
                     else 'application/zip')

        kwargs = {}
        if self._dimension == DimensionType.DIM_3D:
            kwargs["dimension"] = DimensionType.DIM_3D
        writer = writer_classes[quality](image_quality, **kwargs)

        images = []
        buff = BytesIO()
        upload_dir = {
            StorageChoice.LOCAL: db_data.get_upload_dirname(),
            StorageChoice.SHARE: settings.SHARE_ROOT,
            StorageChoice.CLOUD_STORAGE: db_data.get_upload_dirname(),
        }[db_data.storage]
        if hasattr(db_data, 'video'):
            source_path = os.path.join(upload_dir, db_data.video.path)

            reader = VideoDatasetManifestReader(
                manifest_path=db_data.get_manifest_path(),
                source_path=source_path,
                chunk_number=chunk_number,
                chunk_size=db_data.chunk_size,
                start=db_data.start_frame,
                stop=db_data.stop_frame,
                step=db_data.get_frame_step())
            for frame in reader:
                images.append((frame, source_path, None))
        else:
            reader = ImageDatasetManifestReader(
                manifest_path=db_data.get_manifest_path(),
                chunk_number=chunk_number,
                chunk_size=db_data.chunk_size,
                start=db_data.start_frame,
                stop=db_data.stop_frame,
                step=db_data.get_frame_step())
            if db_data.storage == StorageChoice.CLOUD_STORAGE:
                db_cloud_storage = db_data.cloud_storage
                assert db_cloud_storage, 'Cloud storage instance was deleted'
                credentials = Credentials()
                credentials.convert_from_db({
                    'type': db_cloud_storage.credentials_type,
                    'value': db_cloud_storage.credentials,
                })
                details = {
                    'resource': db_cloud_storage.resource,
                    'credentials': credentials,
                    'specific_attributes':
                        db_cloud_storage.get_specific_attributes()
                }
                try:
                    cloud_storage_instance = get_cloud_storage_instance(
                        cloud_provider=db_cloud_storage.provider_type,
                        **details)
                    cloud_storage_instance.initialize_content()
                    for item in reader:
                        file_name = f"{item['name']}{item['extension']}"
                        if file_name not in cloud_storage_instance:
                            raise Exception(
                                '{} file was not found on a {} storage'.format(
                                    file_name, cloud_storage_instance.name))
                        with NamedTemporaryFile(mode='w+b',
                                                prefix='cvat',
                                                suffix=file_name.replace(
                                                    os.path.sep, '#'),
                                                delete=False) as temp_file:
                            source_path = temp_file.name
                            buf = cloud_storage_instance.download_fileobj(
                                file_name)
                            temp_file.write(buf.getvalue())
                            checksum = item.get('checksum', None)
                            if not checksum:
                                slogger.cloud_storage[
                                    db_cloud_storage.id].warning(
                                        'A manifest file does not contain checksum for image {}'
                                        .format(item.get('name')))
                            if checksum and not md5_hash(
                                    source_path) == checksum:
                                slogger.cloud_storage[
                                    db_cloud_storage.id].warning(
                                        'Hash sums of files {} do not match'.
                                        format(file_name))
                            images.append((source_path, source_path, None))
                except Exception as ex:
                    storage_status = cloud_storage_instance.get_status()
                    if storage_status == Status.FORBIDDEN:
                        msg = 'The resource {} is no longer available. Access forbidden.'.format(
                            cloud_storage_instance.name)
                    elif storage_status == Status.NOT_FOUND:
                        msg = 'The resource {} not found. It may have been deleted.'.format(
                            cloud_storage_instance.name)
                    else:
                        # check status of last file
                        file_status = cloud_storage_instance.get_file_status(
                            file_name)
                        if file_status == Status.NOT_FOUND:
                            raise Exception(
                                "'{}' not found on the cloud storage '{}'".
                                format(file_name, cloud_storage_instance.name))
                        elif file_status == Status.FORBIDDEN:
                            raise Exception(
                                "Access to the file '{}' on the '{}' cloud storage is denied"
                                .format(file_name,
                                        cloud_storage_instance.name))
                        msg = str(ex)
                    raise Exception(msg)
            else:
                for item in reader:
                    source_path = os.path.join(
                        upload_dir, f"{item['name']}{item['extension']}")
                    images.append((source_path, source_path, None))
        writer.save_as_chunk(images, buff)
        buff.seek(0)
        if db_data.storage == StorageChoice.CLOUD_STORAGE:
            images = [image[0] for image in images if os.path.exists(image[0])]
            for image_path in images:
                os.remove(image_path)
        return buff, mime_type

    def save_chunk(self, db_data_id, chunk_number, quality, buff, mime_type):
        self._cache.set('{}_{}_{}'.format(db_data_id, chunk_number, quality),
                        buff,
                        tag=mime_type)
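
A usage sketch for the class above. db_data is a CVAT Django model instance and the quality value comes from FrameProvider, so both objects here are assumptions about the surrounding application:

interaction = CacheInteraction()
buff, mime = interaction.get_buff_mime(
    chunk_number=0,
    quality=FrameProvider.Quality.COMPRESSED,
    db_data=db_data)  # assumed objects from the caller's context
# buff is a BytesIO with the chunk; mime is 'video/mp4' or 'application/zip'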