Example #1

from cachetools import TTLCache
from flask import current_app

from backend.models.postgis.statuses import (
    MappingNotAllowed,
    ValidatingNotAllowed,
    MappingPermission,
    ValidationPermission,
    TeamRoles,
)
from backend.models.postgis.task import Task, TaskHistory
from backend.models.postgis.utils import NotFound
from backend.services.users.user_service import UserService
from backend.services.project_search_service import ProjectSearchService
from backend.services.project_admin_service import ProjectAdminService
from backend.services.team_service import TeamService
from sqlalchemy import func, or_
from sqlalchemy.sql.expression import true

summary_cache = TTLCache(maxsize=1024, ttl=600)


class ProjectServiceError(Exception):
    """ Custom Exception to notify callers an error occurred when handling projects """

    def __init__(self, message):
        if current_app:
            current_app.logger.error(message)


class ProjectService:
    @staticmethod
    def get_project_by_id(project_id: int) -> Project:
        project = Project.get(project_id)
        if project is None:
            raise NotFound()

        return project
Example #2
class StoryscriptHub:
    update_thread = None

    retry_lock = Lock()
    update_lock = Lock()

    ttl_cache_for_services = TTLCache(maxsize=128, ttl=1 * 60)
    ttl_cache_for_service_names = TTLCache(maxsize=1, ttl=1 * 60)

    @staticmethod
    def get_config_dir(app):
        if sys.platform == 'win32':
            p = os.getenv('APPDATA')
        else:
            p = os.getenv('XDG_DATA_HOME', os.path.expanduser('~/'))

        return os.path.join(p, app)

    def __init__(self, db_path: str = None, auto_update: bool = True,
                 service_wrapper=False):
        """
        StoryscriptHub - a utility to access Storyscript's hub service data.

        :param db_path: The path for the database caching file
        :param auto_update: Will automatically pull services from the hub
        every 30 seconds
        :param service_wrapper: Allows you to utilize safe ServiceData objects
        """

        if db_path is None:
            db_path = StoryscriptHub.get_config_dir('.storyscript')

        os.makedirs(db_path, exist_ok=True)

        self.db_path = db_path

        self._service_wrapper = None
        if service_wrapper:
            self._service_wrapper = ServiceWrapper()
            # we need to update the cache immediately for the
            # service wrapper to initialize data.
            self.update_cache()

        if auto_update:
            self.update_thread = AutoUpdateThread(
                update_function=self.update_cache)

    @cached(cache=ttl_cache_for_service_names)
    def get_all_service_names(self) -> [str]:
        """
        Get all service names and aliases from the database.

        :return: An array of strings, which might look like:
        ["hello", "universe/hello"]
        """
        services = []
        with Database(self.db_path):
            for s in Service.select(Service.name, Service.alias,
                                    Service.username):
                if s.alias:
                    services.append(s.alias)

                services.append(f'{s.username}/{s.name}')

        return services

    @cached(cache=ttl_cache_for_services)
    def get(self, alias=None, owner=None, name=None,
            wrap_service=False) -> Union[Service, ServiceData]:
        """
        Get a service from the database.

        :param alias: Takes precedence when specified over owner/name
        :param owner: The owner of the service
        :param name: The name of the service
        :param wrap_service: When set to true, it will return a
        @ServiceData object
        :return: Returns a Service instance, with all fields populated
        """

        service = None

        # check if the service_wrapper was initialized for automatic
        # wrapping
        if self._service_wrapper is not None:
            service = self._service_wrapper.get(alias=alias, owner=owner,
                                                name=name)

        if service is None:
            service = self._get(alias, owner, name)

        if service is None:
            # Maybe it's new in the Hub?
            with self.retry_lock:
                service = self._get(alias, owner, name)
                if service is None:
                    self.update_cache()
                    service = self._get(alias, owner, name)

        if service is not None:
            # ensures tests don't break
            if isinstance(service, MagicMock):
                return service

            assert isinstance(service, Service) or \
                isinstance(service, ServiceData)
            # if the service wrapper is set, and the service doesn't exist
            # we can safely convert this object since it was probably loaded
            # from the cache
            if wrap_service or self._service_wrapper is not None:
                return ServiceData.from_dict(data={
                    "service_data": json.loads(service.raw_data)
                })

            if service.topics is not None:
                service.topics = json.loads(service.topics)

            if service.configuration is not None:
                service.configuration = json.loads(service.configuration)

        return service

    def _get(self, alias: str = None, owner: str = None, name: str = None):
        try:
            if alias is not None and alias.count("/") == 1:
                owner, name = alias.split("/")
                alias = None

            with Database(self.db_path):
                if alias:
                    service = Service.select().where(Service.alias == alias)
                else:
                    service = Service.select().where(
                        (Service.username == owner) & (Service.name == name))

                return service.get()
        except DoesNotExist:
            return None

    def update_cache(self):
        services = GraphQL.get_all()

        # tell the service wrapper to reload any services from the cache.
        if self._service_wrapper is not None:
            self._service_wrapper.reload_services(services)

        with Database(self.db_path) as db:
            with db.atomic(lock_type='IMMEDIATE'):
                Service.delete().execute()
                for service in services:
                    Service.create(
                        service_uuid=service['serviceUuid'],
                        name=service['service']['name'],
                        alias=service['service']['alias'],
                        username=service['service']['owner']['username'],
                        description=service['service']['description'],
                        certified=service['service']['isCertified'],
                        public=service['service']['public'],
                        topics=json.dumps(service['service']['topics']),
                        state=service['state'],
                        configuration=json.dumps(service['configuration']),
                        readme=service['readme'],
                        raw_data=json.dumps(service))

        with self.update_lock:
            self.ttl_cache_for_service_names.clear()
            self.ttl_cache_for_services.clear()

        return True
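
# The hub above keeps its TTL caches as class attributes, decorates instance
# methods with @cached, and clears both caches when update_cache() refreshes
# the database. A minimal, self-contained sketch of that pattern follows;
# Catalog and its methods are hypothetical, not part of the hub SDK.
from cachetools import TTLCache, cached

class Catalog:
    _name_cache = TTLCache(maxsize=1, ttl=60)  # shared by all instances, like above

    def __init__(self, names):
        self._names = list(names)

    @cached(cache=_name_cache)
    def all_names(self):
        # Stand-in for the database scan in get_all_service_names().
        return list(self._names)

    def refresh(self, names):
        self._names = list(names)
        # Invalidate cached results so the next call sees fresh data, just as
        # update_cache() clears both TTL caches above.
        self._name_cache.clear()
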
Example #3
class GsDataApi(DataApi):
    __definitions = {}
    DEFAULT_SCROLL = '30s'

    # DataApi interface

    @classmethod
    def query_data(cls, query: DataQuery, dataset_id: str = None, asset_id_type: Union[GsIdType, str] = None) \
            -> Union[MDAPIDataBatchResponse, DataQueryResponse, tuple]:
        if query.marketDataCoordinates:
            # Don't use MDAPIDataBatchResponse for now - it doesn't handle quoting style correctly
            results: Union[MDAPIDataBatchResponse,
                           dict] = GsSession.current._post(
                               '/data/coordinates/query', payload=query)
            if isinstance(results, dict):
                return results.get('responses', ())
            else:
                return results.responses if results.responses is not None else ()
        if query.where:
            where = query.where.as_dict()
            xref_keys = set(where.keys()).intersection(XRef.properties())
            if xref_keys:
                # Check that assetId is a symbol dimension of this data set. If not, we need to do a separate query
                # to resolve xref --> assetId
                if len(xref_keys) > 1:
                    raise MqValueError(
                        'Cannot specify more than one type of asset identifier'
                    )

                definition = cls.get_definition(dataset_id)

                sd = definition.dimensions.symbolDimensions
                if definition.parameters.symbolStrategy == 'MDAPI' or (
                        'assetId' not in sd and 'gsid' not in sd):
                    xref_type = min(xref_keys)
                    if asset_id_type is None:
                        asset_id_type = xref_type

                    xref_values = where[asset_id_type]
                    xref_values = (xref_values, ) if isinstance(
                        xref_values, str) else xref_values
                    asset_id_map = GsAssetApi.map_identifiers(
                        xref_type, GsIdType.id, xref_values)

                    if len(asset_id_map) != len(xref_values):
                        raise MqValueError(
                            'Not all {} were resolved to asset Ids'.format(
                                asset_id_type))

                    setattr(query.where, xref_type, None)
                    query.where.assetId = [
                        asset_id_map[x] for x in xref_values
                    ]

        results: Union[DataQueryResponse, dict] = GsSession.current._post(
            '/data/{}/query'.format(dataset_id), payload=query)
        if isinstance(results, dict):
            results = results.get('data', ())
        else:
            results = results.data if results.data is not None else ()

        if asset_id_type not in {GsIdType.id, None}:
            asset_ids = tuple(
                set(filter(None, (r.get('assetId') for r in results))))
            if asset_ids:
                xref_map = GsAssetApi.map_identifiers(GsIdType.id,
                                                      asset_id_type, asset_ids)

                if len(xref_map) != len(asset_ids):
                    raise MqValueError(
                        'Not all asset Ids were resolved to {}'.format(
                            asset_id_type))

                for result in results:
                    result[asset_id_type] = xref_map[result['assetId']]

        return results

    @classmethod
    def last_data(cls,
                  query: DataQuery,
                  dataset_id: str = None) -> Union[list, tuple]:
        if query.marketDataCoordinates:
            result = GsSession.current._post('/data/coordinates/query/last',
                                             payload=query)
            return result.get('responses', ())
        else:
            result = GsSession.current._post(
                '/data/{}/last/query'.format(dataset_id), payload=query)
            return result.get('data', ())

    @classmethod
    def symbol_dimensions(cls, dataset_id: str) -> tuple:
        definition = cls.get_definition(dataset_id)
        return definition.dimensions.symbolDimensions

    @classmethod
    def time_field(cls, dataset_id: str) -> str:
        definition = cls.get_definition(dataset_id)
        return definition.dimensions.timeField

    # GS-specific functionality

    @classmethod
    def get_coverage(cls,
                     dataset_id: str,
                     scroll: str = DEFAULT_SCROLL,
                     scroll_id: Optional[str] = None,
                     limit: int = None,
                     offset: int = None,
                     fields: List[str] = None) -> List[dict]:
        params = {}
        if scroll:
            params['scroll'] = scroll

        if scroll_id:
            params['scrollId'] = scroll_id

        if not limit:
            limit = 4000
        params['limit'] = limit

        if offset:
            params['offset'] = offset

        if fields:
            params['fields'] = fields

        body = GsSession.current._get('/data/{}/coverage'.format(dataset_id),
                                      payload=params)
        results = body['results']
        if len(results) > 0 and 'scrollId' in body:
            return results + cls.get_coverage(dataset_id,
                                              scroll_id=body['scrollId'],
                                              scroll=GsDataApi.DEFAULT_SCROLL,
                                              limit=limit)
        else:
            return results

    @classmethod
    def create(cls, definition: Union[DataSetEntity, dict]) -> DataSetEntity:
        result = GsSession.current._post('/data/datasets', payload=definition)
        return result

    @classmethod
    def update_definition(
            cls, dataset_id: str, definition: Union[DataSetEntity,
                                                    dict]) -> DataSetEntity:
        result = GsSession.current._put('/data/datasets/{}'.format(dataset_id),
                                        payload=definition,
                                        cls=DataSetEntity)
        return result

    @classmethod
    def upload_data(cls, dataset_id: str, data: Union[pd.DataFrame, list,
                                                      tuple]) -> dict:
        result = GsSession.current._post('/data/{}'.format(dataset_id),
                                         payload=data)
        return result

    @classmethod
    def get_definition(cls, dataset_id: str) -> DataSetEntity:
        definition = cls.__definitions.get(dataset_id)
        if not definition:
            definition = GsSession.current._get(
                '/data/datasets/{}'.format(dataset_id), cls=DataSetEntity)
            if not definition:
                raise MqValueError('Unknown dataset {}'.format(dataset_id))

            cls.__definitions[dataset_id] = definition

        return definition

    @staticmethod
    def build_market_data_query(asset_ids: List[str],
                                query_type: QueryType,
                                where: Union[FieldFilterMap] = None,
                                source: Union[str] = None,
                                real_time: bool = False):
        inner = {
            'assetIds': asset_ids,
            'queryType': query_type.value,
            'where': where or {},
            'source': source or 'any',
            'frequency': 'Real Time' if real_time else 'End Of Day',
            'measures': ['Curve']
        }
        if real_time:
            inner['startTime'] = DataContext.current.start_time
            inner['endTime'] = DataContext.current.end_time
        else:
            inner['startDate'] = DataContext.current.start_date
            inner['endDate'] = DataContext.current.end_date
        return {'queries': [inner]}

    @classmethod
    def get_market_data(cls, query) -> pd.DataFrame:
        GsSession.current: GsSession
        body = GsSession.current._post('/data/markets', payload=query)
        container = body['responses'][0]['queryResponse'][0]
        if 'errorMessages' in container:
            raise MqValueError(container['errorMessages'])
        if 'response' not in container:
            return pd.DataFrame()
        df = pd.DataFrame(container['response']['data'])
        df.set_index('date' if 'date' in df.columns else 'time', inplace=True)
        df.index = pd.to_datetime(df.index)
        return df

    @classmethod
    def __normalise_coordinate_data(
        cls, data: Iterable[Union[MDAPIDataQueryResponse, dict]]
    ) -> Iterable[Iterable[dict]]:
        ret = []
        for response in data:
            coord_data = []
            rows = (r.as_dict() for r in response.data) if isinstance(
                response, MDAPIDataQueryResponse) else response.get(
                    'data', ())

            for pt in rows:
                if not pt:
                    continue

                if 'value' not in pt:
                    value_field = pt['mktQuotingStyle']
                    pt['value'] = pt.pop(value_field)

                coord_data.append(pt)
            ret.append(coord_data)

        return ret

    @classmethod
    def __df_from_coordinate_data(cls, data: Iterable[dict]) -> pd.DataFrame:
        df = cls._sort_coordinate_data(pd.DataFrame.from_records(data))
        index_field = next((f for f in ('time', 'date') if f in df.columns),
                           None)
        if index_field:
            df = df.set_index(pd.DatetimeIndex(df.loc[:, index_field].values))

        return df

    @classmethod
    def _sort_coordinate_data(
        cls,
        df: pd.DataFrame,
        by: Tuple[str] = ('date', 'time', 'mktType', 'mktAsset', 'mktClass',
                          'mktPoint', 'mktQuotingStyle', 'value')
    ) -> pd.DataFrame:
        columns = df.columns
        field_order = [f for f in by if f in columns]
        field_order.extend(f for f in columns if f not in field_order)
        return df[field_order]

    @classmethod
    def _coordinate_from_str(cls, coordinate_str: str) -> MarketDataCoordinate:
        tmp = coordinate_str.rsplit(".", 1)
        dimensions = tmp[0].split("_")
        if len(dimensions) < 2:
            raise MqValueError('invalid coordinate ' + coordinate_str)

        kwargs = {
            'mkt_type': dimensions[0],
            'mkt_asset': dimensions[1] or None,
            'mkt_quoting_style': tmp[-1] if len(tmp) > 1 else None
        }

        if len(dimensions) > 2:
            kwargs['mkt_class'] = dimensions[2] or None

        if len(dimensions) > 3:
            kwargs['mkt_point'] = tuple(dimensions[3:]) or None

        return MarketDataCoordinate(**kwargs)

    @classmethod
    def coordinates_last(
        cls,
        coordinates: Union[Iterable[str], Iterable[MarketDataCoordinate]],
        as_of: Optional[dt.datetime] = None,
        vendor: str = 'Goldman Sachs',
        as_dataframe: bool = False,
    ) -> Union[dict, pd.DataFrame]:
        market_data_coordinates = tuple(
            cls._coordinate_from_str(coord) if isinstance(coord, str
                                                          ) else coord
            for coord in coordinates)
        ret = {coordinate: None for coordinate in market_data_coordinates}
        query = cls.build_query(
            end=as_of,
            market_data_coordinates=market_data_coordinates,
            vendor=vendor)

        data = cls.last_data(query)

        for idx, row in enumerate(cls.__normalise_coordinate_data(data)):
            try:
                ret[market_data_coordinates[idx]] = row[0]['value']
            except IndexError:
                ret[market_data_coordinates[idx]] = None

        if as_dataframe:
            data = [
                dict(
                    chain(
                        c.as_dict(as_camel_case=True).items(),
                        (('value', v), ))) for c, v in ret.items()
            ]
            return cls.__df_from_coordinate_data(data)

        return ret

    @classmethod
    def coordinates_data(
        cls,
        coordinates: Union[str, MarketDataCoordinate, Iterable[str],
                           Iterable[MarketDataCoordinate]],
        start: Optional[dt.datetime] = None,
        end: Optional[dt.datetime] = None,
        vendor: str = 'Goldman Sachs',
        as_multiple_dataframes: bool = False
    ) -> Union[pd.DataFrame, Tuple[pd.DataFrame]]:
        coordinates_iterable = (coordinates, ) if isinstance(
            coordinates, (MarketDataCoordinate, str)) else coordinates
        query = cls.build_query(market_data_coordinates=tuple(
            cls._coordinate_from_str(coord) if isinstance(coord, str
                                                          ) else coord
            for coord in coordinates_iterable),
                                vendor=vendor,
                                start=start,
                                end=end)

        results = cls.__normalise_coordinate_data(cls.query_data(query))

        if as_multiple_dataframes:
            return tuple(
                GsDataApi.__df_from_coordinate_data(r) for r in results)
        else:
            return cls.__df_from_coordinate_data(chain.from_iterable(results))

    @classmethod
    def coordinates_data_series(
        cls,
        coordinates: Union[str, MarketDataCoordinate, Iterable[str],
                           Iterable[MarketDataCoordinate]],
        start: Optional[dt.datetime] = None,
        end: Optional[dt.datetime] = None,
        vendor: str = 'Goldman Sachs',
    ) -> Union[pd.Series, Tuple[pd.Series]]:
        dfs = cls.coordinates_data(coordinates,
                                   start=start,
                                   end=end,
                                   vendor=vendor,
                                   as_multiple_dataframes=True)

        ret = tuple(pd.Series() if df.empty else pd.
                    Series(index=df.index, data=df.value.values) for df in dfs)
        if isinstance(coordinates, (MarketDataCoordinate, str)):
            return ret[0]
        else:
            return ret

    @staticmethod
    @cachetools.cached(TTLCache(ttl=3600, maxsize=128))
    def get_types(dataset_id: str):
        results = GsSession.current._get(f'/data/catalog/{dataset_id}')
        fields = results.get("fields")
        if fields:
            field_types = {}
            for key, value in fields.items():
                field_type = value.get('type')
                field_format = value.get('format')
                field_types[key] = field_format or field_type
            return field_types
        raise RuntimeError(f"Unable to get Dataset schema for {dataset_id}")

    @classmethod
    def construct_dataframe_with_types(
            cls, dataset_id: str, data: Union[Base, list,
                                              tuple]) -> pd.DataFrame:
        """
        Constructs a dataframe with correct date types.
        :param data: data to convert with correct types
        :return: dataframe with correct types
        """
        if len(data):
            dataset_types = cls.get_types(dataset_id)
            df = pd.DataFrame(data)

            for field_name, type_name in dataset_types.items():
                if df.get(field_name) is not None and type_name in (
                        'date', 'date-time'):
                    df = df.astype({field_name: numpy.datetime64})

            field_names = dataset_types.keys()

            if 'date' in field_names:
                df = df.set_index('date')
            elif 'time' in field_names:
                df = df.set_index('time')

            return df
        else:
            return pd.DataFrame({})
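
# A small, self-contained sketch of the caching pattern used by
# GsDataApi.get_types above: a @cachetools.cached staticmethod backed by a
# one-hour TTLCache, so each dataset's schema is re-fetched at most about once
# an hour. fetch_schema and SchemaApi are hypothetical stand-ins for the
# GsSession catalog call.
import cachetools
from cachetools import TTLCache

def fetch_schema(dataset_id: str) -> dict:
    # Placeholder for the real HTTP call to /data/catalog/{dataset_id}.
    return {"fields": {"date": {"type": "string", "format": "date"}}}

class SchemaApi:
    @staticmethod
    @cachetools.cached(TTLCache(ttl=3600, maxsize=128))
    def get_types(dataset_id: str) -> dict:
        fields = fetch_schema(dataset_id).get("fields", {})
        return {key: value.get("format") or value.get("type")
                for key, value in fields.items()}
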
Example #4
"""Util functions to help run generic searches for craigslist toronto"""

from bs4 import BeautifulSoup as bs4
from cachetools import cached, TTLCache
from collections import OrderedDict
import json
import requests
import sys

URL_BASE = 'https://toronto.craigslist.org'
cache = TTLCache(maxsize=120, ttl=300)

AREAS = {
    'toronto': 'tor',
    'durham_region': 'drh',
    'york_region': 'yrk',
    'brampton': 'bra',
    'mississuaga': 'mss',
    'oakville': 'oak'
}

CATEGORY_TOPICS = {
    'community': 'ccc',
    'events': 'eee',
    'for_sale': 'sss',
    'gigs': 'ggg',
    'housing': 'hhh',
    'jobs': 'jjj',
    'resume': 'rrr',
    'services': 'bbb'
}
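
# A hedged sketch (not in the original module) of how `cache` and the lookup
# tables above might be wired together: a search helper cached for five
# minutes per (area, topic, query). The /search/<area>/<topic> path and the
# 'query' parameter are assumptions about craigslist's URL scheme.
@cached(cache=cache)
def search(area: str, topic: str, query: str) -> str:
    """Return raw HTML for a craigslist search (hypothetical helper)."""
    url = f"{URL_BASE}/search/{AREAS[area]}/{CATEGORY_TOPICS[topic]}"
    response = requests.get(url, params={'query': query})
    response.raise_for_status()
    return response.text
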
Example #5

    def __init__(self, check, config, shutdown_callback):
        collection_interval = float(
            config.statement_samples_config.get('collection_interval',
                                                DEFAULT_COLLECTION_INTERVAL))
        if collection_interval <= 0:
            collection_interval = DEFAULT_COLLECTION_INTERVAL
        super(PostgresStatementSamples, self).__init__(
            check,
            rate_limit=1 / collection_interval,
            run_sync=is_affirmative(
                config.statement_samples_config.get('run_sync', False)),
            enabled=is_affirmative(
                config.statement_samples_config.get('enabled', True)),
            dbms="postgres",
            min_collection_interval=config.min_collection_interval,
            expected_db_exceptions=(psycopg2.errors.DatabaseError, ),
            job_name="query-samples",
            shutdown_callback=shutdown_callback,
        )
        self._check = check
        self._config = config
        self._tags_no_db = None
        self._activity_last_query_start = None
        # The value is loaded when connecting to the main database
        self._explain_function = config.statement_samples_config.get(
            'explain_function', 'datadog.explain_statement')
        self._obfuscate_options = to_native_string(
            json.dumps(self._config.obfuscator_options))

        self._collection_strategy_cache = TTLCache(
            maxsize=config.statement_samples_config.get(
                'collection_strategy_cache_maxsize', 1000),
            ttl=config.statement_samples_config.get(
                'collection_strategy_cache_ttl', 300),
        )

        self._explain_errors_cache = TTLCache(
            maxsize=config.statement_samples_config.get(
                'explain_errors_cache_maxsize', 5000),
            # only try to re-explain invalid statements once per day
            ttl=config.statement_samples_config.get('explain_errors_cache_ttl',
                                                    24 * 60 * 60),
        )

        # explained_statements_ratelimiter: limit how often we try to re-explain the same query
        self._explained_statements_ratelimiter = RateLimitingTTLCache(
            maxsize=int(
                config.statement_samples_config.get(
                    'explained_queries_cache_maxsize', 5000)),
            ttl=60 * 60 / int(
                config.statement_samples_config.get(
                    'explained_queries_per_hour_per_query', 60)),
        )

        # seen_samples_ratelimiter: limit the ingestion rate per (query_signature, plan_signature)
        self._seen_samples_ratelimiter = RateLimitingTTLCache(
            # assuming ~100 bytes per entry (query & plan signature, key hash, 4 pointers (ordered dict), expiry time)
            # total size: 10k * 100 = 1 Mb
            maxsize=int(
                config.statement_samples_config.get(
                    'seen_samples_cache_maxsize', 10000)),
            ttl=60 * 60 / int(
                config.statement_samples_config.get(
                    'samples_per_hour_per_query', 15)),
        )

        self._activity_coll_enabled = is_affirmative(
            self._config.statement_activity_config.get('enabled', True))
        # activity events cannot be reported more often than regular samples
        self._activity_coll_interval = max(
            self._config.statement_activity_config.get(
                'collection_interval', DEFAULT_ACTIVITY_COLLECTION_INTERVAL),
            collection_interval,
        )
        self._activity_max_rows = self._config.statement_activity_config.get(
            'payload_row_limit', 3500)
        # Keep track of last time we sent an activity event
        self._time_since_last_activity_event = 0
        self._pg_stat_activity_cols = None
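
# A hedged sketch of the rate-limiting idea behind the two caches above (not
# the actual datadog RateLimitingTTLCache implementation): a key may "acquire"
# at most once per TTL window, so ttl = 3600 / N allows roughly N acquisitions
# per key per hour.
from cachetools import TTLCache

class SimpleRateLimitingTTLCache(TTLCache):
    def acquire(self, key) -> bool:
        """Return True only if the key has not been seen within the TTL window."""
        if key in self:
            return False
        self[key] = True
        return True

_limiter = SimpleRateLimitingTTLCache(maxsize=5000, ttl=3600 / 60)  # ~60 per hour per key
assert _limiter.acquire("query-signature-1") is True
assert _limiter.acquire("query-signature-1") is False  # rate-limited for the rest of the window
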
Example #6
from flask import render_template, url_for, flash, redirect
from . import app, db
from .models import Post
from cachetools import cached, LRUCache, TTLCache
import re
import json
from . import _config
# from .post_orm import Post


@app.route("/")
@app.route("/home")
@cached(cache=TTLCache(maxsize=_config.CACHE_MAXSIZE, ttl=_config.CACHE_TTL))
def home():
    posts = Post.query.filter(
        Post.similar_post_info != json.dumps([])).order_by(
            Post.id.desc()).all()
    non_duplicate_post = []
    for post in posts:
        if post.id in non_duplicate_post:
            continue
        post.get_similar_post_info()
        similar_post_json = post.similar_post_to_json()
        non_duplicate_post.append(post.id)
        for similar_post in similar_post_json:
            if similar_post['score'] > _config.DD_THRESHOLD:
                non_duplicate_post.append(similar_post['id'])
    all_posts = Post.query.order_by(Post.id.desc()).all()
    for p in all_posts:
        if p.url:
            match = re.search(r'https?://([\w.]+)/', p.url)
            if match:
                p.domain = match.group(1)
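
# A minimal, self-contained sketch of the caching pattern used by home() above:
# the result of a zero-argument view-like function is reused for the whole TTL,
# so the expensive body runs at most once per window. Names and values below
# are placeholders, not part of the blog application.
from cachetools import cached, TTLCache

call_count = 0

@cached(cache=TTLCache(maxsize=32, ttl=300))
def render_home() -> str:
    global call_count
    call_count += 1                      # stand-in for the Post queries above
    return f"<html>home, render #{call_count}</html>"

render_home()
render_home()                            # second call served from the cache
assert call_count == 1                   # the body executed only once
# Since home() takes no arguments there is only ever one cache key, which is
# why a TTL cache works well for a whole-page view like this.
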
Example #7
        logger.info(f"running hook {url}")
        data = {
            "id": self.user_id,
            "date": str(datetime.now()),
            "type": "CutFileCompletedEvent",
            "data": {
                "room_id": int(self.room_id),
                "path": str(self.out_path)
            },
        }
        httpx.post(url, json=data)
        logger.info(f"run hook {url} successfully")


if TYPE_CHECKING:
    cut_tasks: TTLCache[str, CutTask] = TTLCache(maxsize=100, ttl=60 * 60 * 1)
else:
    cut_tasks = TTLCache(maxsize=100, ttl=60 * 60 * 1)


def check_sub(user_id: str, uid: str) -> Optional[str]:
    sub_list = get_sub_list(user_id)
    if uid not in sub_list:
        return "尚未订阅该主播"
    if not sub_list[uid].get("record", False):
        return "未开启该主播的自动录播"


async def check_task(room_id: str) -> Union[str, Path]:
    task_info = await get_task(room_id)
    if not task_info:
Example #8
import requests
from cachetools import cached, TTLCache

api_url = "https://freegeoip.app/json/"


# cached entries expire 4 days (345,600 seconds) after insertion
@cached(cache=TTLCache(maxsize=1000, ttl=345600))
def get_ip_data(ip):
    response = requests.get(api_url + ip)
    return response.json()
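
# Usage sketch (requires network access to freegeoip.app): the first call for a
# given IP performs the HTTP request; repeated calls with the same IP within
# the 4-day TTL return the cached JSON without touching the network.
if __name__ == "__main__":
    print(get_ip_data("8.8.8.8"))  # network request, result stored in the cache
    print(get_ip_data("8.8.8.8"))  # served from the cache, no second request
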
Example #9

from cachetools import TTLCache
from flask import current_app

from backend.models.postgis.project import Project
from backend.models.postgis.user import User, UserRole, MappingLevel, UserEmail
from backend.models.postgis.task import TaskHistory, TaskAction, Task
from backend.models.dtos.user_dto import UserTaskDTOs
from backend.models.dtos.stats_dto import Pagination
from backend.models.postgis.statuses import TaskStatus, ProjectStatus
from backend.models.postgis.utils import NotFound
from backend.services.users.osm_service import OSMService, OSMServiceError
from backend.services.messaging.smtp_service import SMTPService
from backend.services.messaging.template_service import (
    get_template,
    template_var_replacing,
)


user_filter_cache = TTLCache(maxsize=1024, ttl=600)


class UserServiceError(Exception):
    """ Custom Exception to notify callers an error occurred when in the User Service """

    def __init__(self, message):
        if current_app:
            current_app.logger.error(message)


class UserService:
    @staticmethod
    def get_user_by_id(user_id: int) -> User:
        user = User.get_by_id(user_id)
Example #10
import uuid

from cachetools import TTLCache
from datetime import datetime
from http import HTTPStatus
from sanic import (exceptions, response)
from random import randint

from application import APIApplication
from decorators import require_authorization
from schemas import (Command, Task, TaskView)

# Set up the application.
app = APIApplication('todos-api')
# Set up memory-based storage backend for the TODO items.
collection = TTLCache(32, 1800)


def _update_data(pk, data):
    """Updates data in collection.

    Used via the event loop's `call_later` to simulate an eventually consistent architecture.

    Arguments:
        pk: Primary key for the item to update.
        data: Data to set. Note: to delete items, set it to `None`.
    """
    if data is None:
        del collection[pk]
    else:
        collection[pk] = data
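
# A hedged usage sketch of the eventual-consistency simulation described in the
# docstring above: a handler schedules _update_data() on the running asyncio
# loop instead of writing immediately, so the change becomes visible slightly
# later. The 0.5-second delay is an arbitrary placeholder.
import asyncio

async def simulate_deferred_write(pk, data):
    loop = asyncio.get_running_loop()
    loop.call_later(0.5, _update_data, pk, data)  # applied ~0.5 s from now
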
Example #11
import os
import re
from collections import namedtuple

import pandas as pd
import requests
from bs4 import BeautifulSoup
from cachetools import cached, TTLCache

from helpers import config
from models.attendance import Attendance
from models.raid import Raid
from models.raidevent import RaidEvent

# CACHE CONFIGURATION
_cache = TTLCache(maxsize=100, ttl=60)

# PANDAS DISPLAY CONFIGURATION
pd.set_option('display.max_rows', 25)
pd.set_option('display.max_columns', 10)
pd.set_option('display.width', 1000)
pd.set_option('display.column_space', 25)

# EQDKP API CONFIGURATION
_URL = os.getenv('EQDKP_URL')
_API_URL = _URL + 'api.php'
_API_TOKEN = os.getenv('EQDKP_API_TOKEN')
_API_HEADERS = {'X-Custom-Authorization': f'token={_API_TOKEN}&type=api'}

# GET STANDINGS CONFIGURATION
EQDKP_COLUMNS = ['CHARACTER', 'CLASS', 'DKP', '30DAY', '60DAY', '90DAY']
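
# A hedged sketch (not from the original module) of how _cache is presumably
# used further down: memoize EQDKP API responses for 60 seconds so repeated
# standings lookups do not hammer the API. The 'function' and 'format' request
# parameters are assumptions about the EQDKP endpoint; only the header-based
# auth above comes from the original.
@cached(cache=_cache)
def _fetch(function: str) -> dict:
    response = requests.get(_API_URL,
                            params={'function': function, 'format': 'json'},
                            headers=_API_HEADERS)
    response.raise_for_status()
    return response.json()
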
Example #12
    def __init__(self, capacity, *indices):
        self._cache = TTLCache(capacity, TTL, timer=time)
        self._indices = indices
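
# A self-contained reconstruction of the context the fragment above appears to
# come from, under assumed names: the TTL constant, the time-based timer, and
# the put/get helpers are illustrative, not taken from the original.
from time import time
from cachetools import TTLCache

TTL = 300  # assumed value; the original constant is defined elsewhere in its module

class IndexedCache:
    def __init__(self, capacity, *indices):
        self._cache = TTLCache(capacity, TTL, timer=time)  # time.time drives expiry
        self._indices = indices

    def put(self, key, value):
        self._cache[key] = value

    def get(self, key, default=None):
        return self._cache.get(key, default)
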
Example #13

from cachetools import TTLCache
from flask import current_app
from sqlalchemy import text, func

from backend import create_app, db
from backend.models.dtos.message_dto import MessageDTO, MessagesDTO
from backend.models.dtos.stats_dto import Pagination
from backend.models.postgis.message import Message, MessageType, NotFound
from backend.models.postgis.notification import Notification
from backend.models.postgis.project import Project
from backend.models.postgis.task import TaskStatus, TaskAction, TaskHistory
from backend.services.messaging.smtp_service import SMTPService
from backend.services.messaging.template_service import get_template, get_profile_url
from backend.services.users.user_service import UserService, User


message_cache = TTLCache(maxsize=512, ttl=30)


class MessageServiceError(Exception):
    """ Custom Exception to notify callers an error occurred when handling mapping """

    def __init__(self, message):
        if current_app:
            current_app.logger.error(message)


class MessageService:
    @staticmethod
    def send_welcome_message(user: User):
        """ Sends welcome message to all new users at Sign up"""
        text_template = get_template("welcome_message_en.txt")
Example #14
class ProjectService:
    @staticmethod
    def get_project_by_id(project_id: int) -> Project:
        project = Project.get(project_id)
        if project is None:
            raise NotFound()

        return project

    @staticmethod
    def exists(project_id: int) -> bool:
        project = Project.exists(project_id)
        if project is None:
            raise NotFound()
        return True

    @staticmethod
    def get_project_by_name(project_id: int) -> Project:
        project = Project.get(project_id)
        if project is None:
            raise NotFound()

        return project

    @staticmethod
    def auto_unlock_tasks(project_id: int):
        Task.auto_unlock_tasks(project_id)

    @staticmethod
    def delete_tasks(project_id: int, tasks_ids):
        # Validate project exists.
        project = Project.get(project_id)
        if project is None:
            raise NotFound({"project": project_id})

        tasks = [{"id": i, "obj": Task.get(i, project_id)} for i in tasks_ids]

        # In case a task is not found.
        not_found = [t["id"] for t in tasks if t["obj"] is None]
        if len(not_found) > 0:
            raise NotFound({"tasks": not_found})

        # Delete task one by one.
        [t["obj"].delete() for t in tasks]

    @staticmethod
    def get_contribs_by_day(project_id: int) -> ProjectContribsDTO:
        # Validate that project exists
        project = ProjectService.get_project_by_id(project_id)

        # Fetch all state change with date and task ID
        stats = (
            TaskHistory.query.with_entities(
                TaskHistory.action_text.label("action_text"),
                func.DATE(TaskHistory.action_date).label("day"),
                TaskHistory.task_id.label("task_id"),
            )
            .filter(TaskHistory.project_id == project_id)
            .filter(
                TaskHistory.action == "STATE_CHANGE",
                or_(
                    TaskHistory.action_text == "MAPPED",
                    TaskHistory.action_text == "VALIDATED",
                    TaskHistory.action_text == "INVALIDATED",
                ),
            )
            .group_by("action_text", "day", "task_id")
            .order_by("day")
        ).all()

        contribs_dto = ProjectContribsDTO()
        # Filter and store unique dates
        dates = list(set(r[1] for r in stats))
        dates.sort()  # ascending order
        dates_list = []
        cumulative_mapped = 0
        cumulative_validated = 0
        # A hashmap to track task state change updates
        tasks = {
            "MAPPED": {"total": 0},
            "VALIDATED": {"total": 0},
            "INVALIDATED": {"total": 0},
        }

        for date in dates:
            dto = ProjectContribDTO(
                {
                    "date": date,
                    "mapped": 0,
                    "validated": 0,
                    "total_tasks": project.total_tasks,
                }
            )
            # s -> ('LOCKED_FOR_MAPPING', datetime.date(2019, 4, 23), 1)
            # s[0] -> action, s[1] -> date, s[2] -> task_id
            values = [(s[0], s[2]) for s in stats if date == s[1]]
            values.sort(reverse=True)  # Most recent action comes first
            for val in values:
                task_id = val[1]
                task_status = val[0]

                if task_status == "MAPPED":
                    if task_id not in tasks["MAPPED"]:
                        tasks["MAPPED"][task_id] = 1
                        tasks["MAPPED"]["total"] += 1
                        dto.mapped += 1
                elif task_status == "VALIDATED":
                    if task_id not in tasks["VALIDATED"]:
                        tasks["VALIDATED"][task_id] = 1
                        tasks["VALIDATED"]["total"] += 1
                        dto.validated += 1
                        if task_id in tasks["INVALIDATED"]:
                            del tasks["INVALIDATED"][task_id]
                            tasks["INVALIDATED"]["total"] -= 1
                        if task_id not in tasks["MAPPED"]:
                            tasks["MAPPED"][task_id] = 1
                            tasks["MAPPED"]["total"] += 1
                            dto.mapped += 1
                else:
                    if task_id not in tasks["INVALIDATED"]:
                        tasks["INVALIDATED"][task_id] = 1
                        tasks["INVALIDATED"]["total"] += 1
                        if task_id in tasks["MAPPED"]:
                            del tasks["MAPPED"][task_id]
                            tasks["MAPPED"]["total"] -= 1
                            if dto.mapped > 0:
                                dto.mapped -= 1
                        if task_id in tasks["VALIDATED"]:
                            del tasks["VALIDATED"][task_id]
                            tasks["VALIDATED"]["total"] -= 1
                            if dto.validated > 0:
                                dto.validated -= 1

                cumulative_mapped = tasks["MAPPED"]["total"]
                cumulative_validated = tasks["VALIDATED"]["total"]
                dto.cumulative_mapped = cumulative_mapped
                dto.cumulative_validated = cumulative_validated
            dates_list.append(dto)

        contribs_dto.stats = dates_list

        return contribs_dto

    @staticmethod
    def get_project_dto_for_mapper(
        project_id, current_user_id, locale="en", abbrev=False
    ) -> ProjectDTO:
        """
        Get the project DTO for mappers
        :param project_id: ID of the Project mapper has requested
        :param locale: Locale the mapper has requested
        :raises ProjectServiceError, NotFound
        """
        project = ProjectService.get_project_by_id(project_id)
        # if project is public and is not draft, we don't need to check permissions
        if not project.private and not project.status == ProjectStatus.DRAFT.value:
            return project.as_dto_for_mapping(current_user_id, locale, abbrev)

        is_allowed_user = True
        is_team_member = None
        is_manager_permission = False

        if current_user_id:
            is_manager_permission = ProjectAdminService.is_user_action_permitted_on_project(
                current_user_id, project_id
            )
        # Draft Projects - admins, authors, org admins & team managers permitted
        if project.status == ProjectStatus.DRAFT.value:
            if not is_manager_permission:
                is_allowed_user = False
                raise ProjectServiceError("Unable to fetch project")

        # Private Projects - allowed_users, admins, org admins &
        # assigned teams (mappers, validators, project managers), authors permitted

        if project.private and not is_manager_permission:
            is_allowed_user = False
            if current_user_id:
                is_allowed_user = (
                    len(
                        [
                            user
                            for user in project.allowed_users
                            if user.id == current_user_id
                        ]
                    )
                    > 0
                )

        if not (is_allowed_user or is_manager_permission):
            if current_user_id:
                allowed_roles = [
                    TeamRoles.MAPPER.value,
                    TeamRoles.VALIDATOR.value,
                    TeamRoles.PROJECT_MANAGER.value,
                ]
                is_team_member = TeamService.check_team_membership(
                    project_id, allowed_roles, current_user_id
                )

        if is_allowed_user or is_manager_permission or is_team_member:
            return project.as_dto_for_mapping(current_user_id, locale, abbrev)
        else:
            raise ProjectServiceError("Unable to fetch project")

    @staticmethod
    def get_project_tasks(
        project_id,
        task_ids_str: str,
        order_by: str = None,
        order_by_type: str = "ASC",
        status: int = None,
    ):
        project = ProjectService.get_project_by_id(project_id)
        return project.tasks_as_geojson(task_ids_str, order_by, order_by_type, status)

    @staticmethod
    def get_project_aoi(project_id):
        project = ProjectService.get_project_by_id(project_id)
        return project.get_aoi_geometry_as_geojson()

    @staticmethod
    def get_project_priority_areas(project_id):
        project = ProjectService.get_project_by_id(project_id)
        geojson_areas = []
        for priority_area in project.priority_areas:
            geojson_areas.append(priority_area.get_as_geojson())
        return geojson_areas

    @staticmethod
    def get_task_for_logged_in_user(user_id: int):
        """ if the user is working on a task in the project return it """
        tasks = Task.get_locked_tasks_for_user(user_id)

        tasks_dto = tasks
        return tasks_dto

    @staticmethod
    def get_task_details_for_logged_in_user(user_id: int, preferred_locale: str):
        """ if the user is working on a task in the project return it """
        tasks = Task.get_locked_tasks_details_for_user(user_id)

        if len(tasks) == 0:
            raise NotFound()

        # TODO put the task details in to a DTO
        dtos = []
        for task in tasks:
            dtos.append(task.as_dto_with_instructions(preferred_locale))

        task_dtos = TaskDTOs()
        task_dtos.tasks = dtos

        return task_dtos

    @staticmethod
    def is_user_in_the_allowed_list(allowed_users: list, current_user_id: int):
        """For private projects, check if user is present in the allowed list"""
        return (
            len([user.id for user in allowed_users if user.id == current_user_id]) > 0
        )

    @staticmethod
    def evaluate_mapping_permission(
        project_id: int, user_id: int, mapping_permission: int
    ):
        allowed_roles = [
            TeamRoles.MAPPER.value,
            TeamRoles.VALIDATOR.value,
            TeamRoles.PROJECT_MANAGER.value,
        ]
        is_team_member = TeamService.check_team_membership(
            project_id, allowed_roles, user_id
        )

        # mapping_permission = 1(level),2(teams),3(teamsAndLevel)
        if mapping_permission == MappingPermission.TEAMS.value:
            if not is_team_member:
                return False, MappingNotAllowed.USER_NOT_TEAM_MEMBER

        elif mapping_permission == MappingPermission.LEVEL.value:
            if not ProjectService._is_user_intermediate_or_advanced(user_id):
                return False, MappingNotAllowed.USER_NOT_CORRECT_MAPPING_LEVEL

        elif mapping_permission == MappingPermission.TEAMS_LEVEL.value:
            if not ProjectService._is_user_intermediate_or_advanced(user_id):
                return False, MappingNotAllowed.USER_NOT_CORRECT_MAPPING_LEVEL
            if not is_team_member:
                return False, MappingNotAllowed.USER_NOT_TEAM_MEMBER

    @staticmethod
    def is_user_permitted_to_map(project_id: int, user_id: int):
        """ Check if the user is allowed to map the on the project in scope """
        if UserService.is_user_blocked(user_id):
            return False, MappingNotAllowed.USER_NOT_ON_ALLOWED_LIST

        project = ProjectService.get_project_by_id(project_id)
        if project.license_id:
            if not UserService.has_user_accepted_license(user_id, project.license_id):
                return False, MappingNotAllowed.USER_NOT_ACCEPTED_LICENSE
        mapping_permission = project.mapping_permission

        is_manager_permission = (
            False  # is_admin or is_author or is_org_manager or is_manager_team
        )
        if ProjectAdminService.is_user_action_permitted_on_project(user_id, project_id):
            is_manager_permission = True

        # Draft (public/private) accessible only for is_manager_permission
        if (
            ProjectStatus(project.status) == ProjectStatus.DRAFT
            and not is_manager_permission
        ):
            return False, MappingNotAllowed.PROJECT_NOT_PUBLISHED

        is_restriction = None
        if not is_manager_permission and mapping_permission:
            is_restriction = ProjectService.evaluate_mapping_permission(
                project_id, user_id, mapping_permission
            )

        tasks = Task.get_locked_tasks_for_user(user_id)
        if len(tasks.locked_tasks) > 0:
            return False, MappingNotAllowed.USER_ALREADY_HAS_TASK_LOCKED

        is_allowed_user = None
        if project.private and not is_manager_permission:
            # Check if user is in allowed user list
            is_allowed_user = ProjectService.is_user_in_the_allowed_list(
                project.allowed_users, user_id
            )
            if is_allowed_user:
                return True, "User allowed to map"

        if not is_manager_permission and is_restriction:
            return is_restriction
        elif project.private and not (
            is_manager_permission or is_allowed_user or not is_restriction
        ):
            return False, MappingNotAllowed.USER_NOT_ON_ALLOWED_LIST

        return True, "User allowed to map"

    @staticmethod
    def _is_user_intermediate_or_advanced(user_id):
        """ Helper method to determine if user level is not beginner """
        user_mapping_level = UserService.get_mapping_level(user_id)
        if user_mapping_level not in [MappingLevel.INTERMEDIATE, MappingLevel.ADVANCED]:
            return False

        return True

    @staticmethod
    def evaluate_validation_permission(
        project_id: int, user_id: int, validation_permission: int
    ):
        allowed_roles = [TeamRoles.VALIDATOR.value, TeamRoles.PROJECT_MANAGER.value]
        is_team_member = TeamService.check_team_membership(
            project_id, allowed_roles, user_id
        )
        # validation_permission = 1(level),2(teams),3(teamsAndLevel)
        if validation_permission == ValidationPermission.TEAMS.value:
            if not is_team_member:
                return False, ValidatingNotAllowed.USER_NOT_TEAM_MEMBER

        elif validation_permission == ValidationPermission.LEVEL.value:
            if not ProjectService._is_user_intermediate_or_advanced(user_id):
                return False, ValidatingNotAllowed.USER_IS_BEGINNER

        elif validation_permission == ValidationPermission.TEAMS_LEVEL.value:
            if not ProjectService._is_user_intermediate_or_advanced(user_id):
                return False, ValidatingNotAllowed.USER_IS_BEGINNER
            if not is_team_member:
                return False, ValidatingNotAllowed.USER_NOT_TEAM_MEMBER

    @staticmethod
    def is_user_permitted_to_validate(project_id, user_id):
        """ Check if the user is allowed to validate on the project in scope """
        if UserService.is_user_blocked(user_id):
            return False, ValidatingNotAllowed.USER_NOT_ON_ALLOWED_LIST

        project = ProjectService.get_project_by_id(project_id)
        if project.license_id:
            if not UserService.has_user_accepted_license(user_id, project.license_id):
                return False, ValidatingNotAllowed.USER_NOT_ACCEPTED_LICENSE
        validation_permission = project.validation_permission

        # is_admin or is_author or is_org_manager or is_manager_team
        is_manager_permission = False
        if ProjectAdminService.is_user_action_permitted_on_project(user_id, project_id):
            is_manager_permission = True

        # Draft (public/private) accessible only for is_manager_permission
        if (
            ProjectStatus(project.status) == ProjectStatus.DRAFT
            and not is_manager_permission
        ):
            return False, ValidatingNotAllowed.PROJECT_NOT_PUBLISHED

        is_restriction = None
        if not is_manager_permission and validation_permission:
            is_restriction = ProjectService.evaluate_validation_permission(
                project_id, user_id, validation_permission
            )

        tasks = Task.get_locked_tasks_for_user(user_id)
        if len(tasks.locked_tasks) > 0:
            return False, ValidatingNotAllowed.USER_ALREADY_HAS_TASK_LOCKED

        is_allowed_user = None
        if project.private and not is_manager_permission:
            # Check if user is in allowed user list
            is_allowed_user = ProjectService.is_user_in_the_allowed_list(
                project.allowed_users, user_id
            )

            if is_allowed_user:
                return True, "User allowed to validate"

        if not is_manager_permission and is_restriction:
            return is_restriction
        elif project.private and not (
            is_manager_permission or is_allowed_user or not is_restriction
        ):
            return False, ValidatingNotAllowed.USER_NOT_ON_ALLOWED_LIST

        return True, "User allowed to validate"

    @staticmethod
    @cached(summary_cache)
    def get_project_summary(
        project_id: int, preferred_locale: str = "en"
    ) -> ProjectSummary:
        """ Gets the project summary DTO """
        project = ProjectService.get_project_by_id(project_id)
        return project.get_project_summary(preferred_locale)

    @staticmethod
    def set_project_as_featured(project_id: int):
        """ Sets project as featured """
        project = ProjectService.get_project_by_id(project_id)
        project.set_as_featured()

    @staticmethod
    def unset_project_as_featured(project_id: int):
        """ Sets project as featured """
        project = ProjectService.get_project_by_id(project_id)
        project.unset_as_featured()

    @staticmethod
    def get_featured_projects(preferred_locale):
        """ Sets project as featured """
        query = ProjectSearchService.create_search_query()
        projects = query.filter(Project.featured == true()).group_by(Project.id).all()

        # Get total contributors.
        contrib_counts = ProjectSearchService.get_total_contributions(projects)
        zip_items = zip(projects, contrib_counts)

        dto = ProjectSearchResultsDTO()
        dto.results = [
            ProjectSearchService.create_result_dto(p, preferred_locale, t)
            for p, t in zip_items
        ]

        return dto

    @staticmethod
    def is_favorited(project_id: int, user_id: int) -> bool:
        project = ProjectService.get_project_by_id(project_id)

        return project.is_favorited(user_id)

    @staticmethod
    def favorite(project_id: int, user_id: int):
        project = ProjectService.get_project_by_id(project_id)
        project.favorite(user_id)

    @staticmethod
    def unfavorite(project_id: int, user_id: int):
        project = ProjectService.get_project_by_id(project_id)
        project.unfavorite(user_id)

    @staticmethod
    def get_project_title(project_id: int, preferred_locale: str = "en") -> str:
        """ Gets the project title DTO """
        project = ProjectService.get_project_by_id(project_id)
        return project.get_project_title(preferred_locale)

    @staticmethod
    @cached(TTLCache(maxsize=1024, ttl=600))
    def get_project_stats(project_id: int) -> ProjectStatsDTO:
        """ Gets the project stats DTO """
        project = ProjectService.get_project_by_id(project_id)
        return project.get_project_stats()

    @staticmethod
    def get_project_user_stats(project_id: int, username: str) -> ProjectUserStatsDTO:
        """ Gets the user stats for a specific project """
        project = ProjectService.get_project_by_id(project_id)
        user = UserService.get_user_by_username(username)
        return project.get_project_user_stats(user.id)

    @staticmethod
    def get_project_teams(project_id: int):
        project = ProjectService.get_project_by_id(project_id)

        if project is None:
            raise NotFound()

        return project.teams

    @staticmethod
    def get_project_organisation(project_id: int) -> Organisation:
        project = ProjectService.get_project_by_id(project_id)

        if project is None:
            raise NotFound()

        return project.organisation
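
# A hedged illustration (separate from ProjectService) of what
# @cached(summary_cache) and @cached(TTLCache(maxsize=1024, ttl=600)) do above:
# results are keyed by the call arguments via cachetools.keys.hashkey and live
# for the cache's ttl unless the cache is cleared explicitly. The names below
# are placeholders.
from cachetools import TTLCache, cached
from cachetools.keys import hashkey

demo_cache = TTLCache(maxsize=1024, ttl=600)

@cached(demo_cache)
def expensive_summary(project_id: int, locale: str = "en") -> str:
    return f"summary of project {project_id} in {locale}"

expensive_summary(1, "en")
assert hashkey(1, "en") in demo_cache    # cached under the argument tuple
demo_cache.clear()                       # explicit invalidation
assert hashkey(1, "en") not in demo_cache
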
Example #15
import logging
from abc import ABC
from datetime import date, timedelta
from typing import List, Union

from cachetools import TTLCache
from cachetools.keys import hashkey
from dateutil.relativedelta import relativedelta, FR, SA, SU, TH, TU, WE, MO
from numpy import busday_offset
from pandas import Series, to_datetime, DataFrame

from gs_quant.api.gs.data import GsDataApi
from gs_quant.markets.securities import ExchangeCode
from gs_quant.target.common import Currency

DATE_LOW_LIMIT = date(1952, 1, 1)
DATE_HIGH_LIMIT = date(2052, 12, 31)
_cache = TTLCache(maxsize=128, ttl=600)
_logger = logging.getLogger(__name__)


class RDateRule(ABC):
    result: date
    number: int
    week_mask: str
    currencies: List[Union[Currency, str]] = None
    exchanges: List[Union[ExchangeCode, str]] = None
    holiday_calendar: List[date] = None

    def __init__(self, result: date, **params):
        self.result = result
        self.number = params.get('number')
        self.week_mask = params.get('week_mask')
Example #16

from threading import RLock

from cachetools import TTLCache
from telegram import Chat, ChatMember, ParseMode, Update
from telegram.ext import CallbackContext

from LEGEND import (
    DEL_CMDS,
    DEMONS,
    DEV_USERS,
    DRAGONS,
    SUPPORT_CHAT,
    TIGERS,
    WOLVES,
    dispatcher,
)

# stores admemes in memory for 10 min.
ADMIN_CACHE = TTLCache(maxsize=512, ttl=60 * 10)
THREAD_LOCK = RLock()


def is_whitelist_plus(chat: Chat,
                      user_id: int,
                      member: ChatMember = None) -> bool:
    return any(user_id in user
               for user in [WOLVES, TIGERS, DEMONS, DRAGONS, DEV_USERS])


def is_support_plus(chat: Chat,
                    user_id: int,
                    member: ChatMember = None) -> bool:
    return user_id in DEMONS or user_id in DRAGONS or user_id in DEV_USERS
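
# A hedged sketch (get_chat_admins is hypothetical, not from this module) of
# how ADMIN_CACHE and THREAD_LOCK are typically used together: cache a chat's
# admin list for the cache's 10-minute TTL, and guard the fill with the lock
# so concurrent handlers don't all hit the Telegram API at once.
def get_chat_admins(chat: Chat) -> list:
    with THREAD_LOCK:
        admins = ADMIN_CACHE.get(chat.id)
        if admins is None:
            admins = [member.user.id for member in chat.get_administrators()]
            ADMIN_CACHE[chat.id] = admins
    return admins
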
Example #17

class PairListManager():

    def __init__(self, exchange, config: dict) -> None:
        self._exchange = exchange
        self._config = config
        self._whitelist = self._config['exchange'].get('pair_whitelist')
        self._blacklist = self._config['exchange'].get('pair_blacklist', [])
        self._pairlist_handlers: List[IPairList] = []
        self._tickers_needed = False
        for pairlist_handler_config in self._config.get('pairlists', None):
            if 'method' not in pairlist_handler_config:
                logger.warning(f"No method found in {pairlist_handler_config}, ignoring.")
                continue
            pairlist_handler = PairListResolver.load_pairlist(
                    pairlist_handler_config['method'],
                    exchange=exchange,
                    pairlistmanager=self,
                    config=config,
                    pairlistconfig=pairlist_handler_config,
                    pairlist_pos=len(self._pairlist_handlers)
                    )
            self._tickers_needed |= pairlist_handler.needstickers
            self._pairlist_handlers.append(pairlist_handler)

        if not self._pairlist_handlers:
            raise OperationalException("No Pairlist Handlers defined")

    @property
    def whitelist(self) -> List[str]:
        """
        The current whitelist
        """
        return self._whitelist

    @property
    def blacklist(self) -> List[str]:
        """
        The current blacklist
        -> no need to override in subclasses
        """
        return self._blacklist

    @property
    def name_list(self) -> List[str]:
        """
        Get list of loaded Pairlist Handler names
        """
        return [p.name for p in self._pairlist_handlers]

    def short_desc(self) -> List[Dict]:
        """
        List of short_desc for each Pairlist Handler
        """
        return [{p.name: p.short_desc()} for p in self._pairlist_handlers]

    @cached(TTLCache(maxsize=1, ttl=1800))
    def _get_cached_tickers(self):
        return self._exchange.get_tickers()

    def refresh_pairlist(self) -> None:
        """
        Run pairlist through all configured Pairlist Handlers.
        """
        # Tickers should be cached to avoid calling the exchange on each call.
        tickers: Dict = {}
        if self._tickers_needed:
            tickers = self._get_cached_tickers()

        # Adjust whitelist if filters are using tickers
        pairlist = self._prepare_whitelist(self._whitelist.copy(), tickers)

        # Generate the pairlist with the first Pairlist Handler in the chain,
        # starting from the prepared whitelist
        pairlist = self._pairlist_handlers[0].gen_pairlist(pairlist, tickers)

        # Process all Pairlist Handlers in the chain
        for pairlist_handler in self._pairlist_handlers:
            pairlist = pairlist_handler.filter_pairlist(pairlist, tickers)

        # Validation against blacklist happens after the chain of Pairlist Handlers
        # to ensure blacklist is respected.
        pairlist = self.verify_blacklist(pairlist, logger.warning)

        self._whitelist = pairlist

    def _prepare_whitelist(self, pairlist: List[str], tickers) -> List[str]:
        """
        Prepare sanitized pairlist for Pairlist Handlers that use tickers data - remove
        pairs that do not have ticker available
        """
        if self._tickers_needed:
            # Copy list since we're modifying this list
            for p in deepcopy(pairlist):
                if p not in tickers:
                    pairlist.remove(p)

        return pairlist

    def verify_blacklist(self, pairlist: List[str], logmethod) -> List[str]:
        """
        Verify and remove items from pairlist - returning a filtered pairlist.
        Logs a warning or info depending on `logmethod`.
        Pairlist Handlers explicitly using this method shall use
        `logmethod=logger.info` to avoid spamming with warning messages
        :param pairlist: Pairlist to validate
        :param logmethod: Function that'll be called, `logger.info` or `logger.warning`.
        :return: pairlist - blacklisted pairs
        """
        for pair in deepcopy(pairlist):
            if pair in self._blacklist:
                logmethod(f"Pair {pair} in your blacklist. Removing it from whitelist...")
                pairlist.remove(pair)
        return pairlist

    def create_pair_list(self, pairs: List[str], timeframe: str = None) -> ListPairsWithTimeframes:
        """
        Create list of pair tuples with (pair, timeframe)
        """
        return [(pair, timeframe or self._config['timeframe']) for pair in pairs]
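
A note on _get_cached_tickers above: cachetools.cached builds the key from every argument, including self, so a maxsize of 1 means a second manager instance would evict the first one's tickers. A minimal sketch of the same pattern in isolation (Exchange and Manager are hypothetical stand-ins, not freqtrade classes):

from cachetools import TTLCache, cached


class Exchange:
    def get_tickers(self) -> dict:
        print("hitting the exchange API")
        return {"BTC/USDT": {"last": 50000.0}}


class Manager:
    def __init__(self, exchange: Exchange) -> None:
        self._exchange = exchange

    @cached(TTLCache(maxsize=1, ttl=1800))
    def _get_cached_tickers(self) -> dict:
        # The default cache key includes `self`, so this single slot is tied to the calling instance.
        return self._exchange.get_tickers()


manager = Manager(Exchange())
manager._get_cached_tickers()  # hits the exchange
manager._get_cached_tickers()  # served from the 30-minute TTL cache
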
Example #18
        return self.get(item)


def return_attr(func):
    def wrapper(*args, **kwargs):
        res = func(*args, **kwargs)
        if isinstance(res, dict):
            return AttributeDict(res)
        return res

    return wrapper


@return_attr
@check_chat
@cached(TTLCache(maxsize=1024, ttl=30))
def get_group_setting(group_id):
    conn, cur = get_cur()
    try:
        query = f"""
select jointime_pin as jtp,
       game_started_pin as stp,
       is_confirm_tsww_enable as cts,
       is_confirm_score_enable as cs,
       is_startnewgame_enable as stn,
       start_mode as stm,
       disabled_by
from v1.all_group_helper
where group_id = %s
        """
        cur.execute(query, (group_id, ))
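
The decorator stack above applies @cached innermost, so the raw dict returned by the query is what gets cached, while the AttributeDict conversion in return_attr runs on every call. A minimal, self-contained sketch of that ordering (load_settings stands in for the real database query and @check_chat is omitted):

from cachetools import TTLCache, cached


class AttributeDict(dict):
    def __getattr__(self, item):
        return self.get(item)


def return_attr(func):
    def wrapper(*args, **kwargs):
        res = func(*args, **kwargs)
        return AttributeDict(res) if isinstance(res, dict) else res
    return wrapper


@return_attr
@cached(TTLCache(maxsize=1024, ttl=30))
def load_settings(group_id: int) -> dict:
    print("querying the database")
    return {"jointime_pin": True, "start_mode": "auto"}


print(load_settings(1).start_mode)    # queries once, prints "auto"
print(load_settings(1).jointime_pin)  # served from the cache for up to 30 seconds
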
Example #19
File: app.py Project: totalhack/chatbot
from chatbot.utils import dbg, jsonr
from toolbox import warn, error, json, st

db.init_app(app)
load_bot_configs(app.config)
setup_caching(app.config)

slack_events_adapter = SlackEventAdapter(app.config['SLACK_SIGNING_SECRET'],
                                         endpoint="/slack/event",
                                         server=app)
slack_client = SlackClient(app.config['SLACK_BOT_TOKEN'])

# TODO: better home and configurable for production
SLACK_CONVO_CACHE_SIZE = 1000
SLACK_CONVO_CACHE_TTL = 60 * 60
slack_convo_cache = TTLCache(SLACK_CONVO_CACHE_SIZE, SLACK_CONVO_CACHE_TTL)


@slack_events_adapter.on("app_mention")
def slack_app_mention(event_data):
    dbg('App mention!')
    dbg(event_data)


@slack_events_adapter.on("message")
def slack_message(event_data):
    dbg('Message!')
    dbg(event_data)

    event = event_data["event"]
    slack_app_id = event_data["api_app_id"]
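
slack_convo_cache above is built with positional arguments (maxsize 1000, ttl one hour). A minimal sketch of how such a cache is typically keyed per conversation (the helper names are hypothetical, not from the chatbot project):

from cachetools import TTLCache

SLACK_CONVO_CACHE_SIZE = 1000
SLACK_CONVO_CACHE_TTL = 60 * 60
slack_convo_cache = TTLCache(maxsize=SLACK_CONVO_CACHE_SIZE, ttl=SLACK_CONVO_CACHE_TTL)


def remember_convo(channel: str, thread_ts: str, convo_id: str) -> None:
    # Entries expire automatically after an hour of the conversation going quiet.
    slack_convo_cache[(channel, thread_ts)] = convo_id


def lookup_convo(channel: str, thread_ts: str):
    # Returns None if the conversation expired or was never stored.
    return slack_convo_cache.get((channel, thread_ts))
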
Example #20
class Schism(IStrategy):
    """
    Strategy Configuration Items
    """
    timeframe = '5m'
    inf_timeframe = '1h'

    buy_params = {
        'inf-pct-adr': 0.83534,
        'inf-rsi': 57,
        'mp': 64,
        'rmi-fast': 49,
        'rmi-slow': 24,
        'xinf-stake-rmi': 45,
        'xtf-fiat-rsi': 28,
        'xtf-stake-rsi': 90
    }

    sell_params = {}

    minimal_roi = {
        "0": 0.05,
        "10": 0.025,
        "20": 0.015,
        "30": 0.01,
        "720": 0.005,
        "1440": 0
    }

    stoploss = -0.30

    # Recommended
    use_sell_signal = False
    sell_profit_only = False
    ignore_roi_if_buy_signal = True

    startup_candle_count: int = 72

    # Strategy Specific Variable Storage
    custom_trade_info = {}
    custom_fiat = "USD"
    custom_current_price_cache: TTLCache = TTLCache(maxsize=100,
                                                    ttl=300)  # 5 minutes
    """
    Informative Pair Definitions
    """
    def informative_pairs(self):
        # add existing pairs from whitelist on the inf_timeframe
        pairs = self.dp.current_whitelist()
        informative_pairs = [(pair, self.inf_timeframe) for pair in pairs]

        # add additional informative pairs based on certain stakes
        if self.config['stake_currency'] in ('BTC', 'ETH'):
            for pair in pairs:
                # add in the COIN/FIAT pairs (e.g. XLM/USD) on base timeframe
                coin, stake = pair.split('/')
                coin_fiat = f"{coin}/{self.custom_fiat}"
                informative_pairs += [(coin_fiat, self.timeframe)]

            # add in the STAKE/FIAT pair (e.g. BTC/USD) on base and inf timeframes
            stake_fiat = f"{self.config['stake_currency']}/{self.custom_fiat}"
            informative_pairs += [(stake_fiat, self.timeframe)]
            informative_pairs += [(stake_fiat, self.inf_timeframe)]

        return informative_pairs

    """
    Indicator Definitions
    """

    def populate_indicators(self, dataframe: DataFrame,
                            metadata: dict) -> DataFrame:
        # Populate/update the trade data if there is any, set trades to false if not live/dry
        self.custom_trade_info[metadata['pair']] = self.populate_trades(
            metadata['pair'])

        # Set up primary indicators
        dataframe['rmi-slow'] = RMI(dataframe, length=21, mom=5)
        dataframe['rmi-fast'] = RMI(dataframe, length=8, mom=4)

        # Momentum Pinball
        dataframe['roc'] = ta.ROC(dataframe, timeperiod=6)
        dataframe['mp'] = ta.RSI(dataframe['roc'], timeperiod=6)

        # Trend Calculations
        dataframe['rmi-up'] = np.where(
            dataframe['rmi-slow'] >= dataframe['rmi-slow'].shift(), 1, 0)
        dataframe['rmi-dn'] = np.where(
            dataframe['rmi-slow'] <= dataframe['rmi-slow'].shift(), 1, 0)
        dataframe['rmi-up-trend'] = np.where(
            dataframe['rmi-up'].rolling(3, min_periods=1).sum() >= 2, 1, 0)
        dataframe['rmi-dn-trend'] = np.where(
            dataframe['rmi-dn'].rolling(3, min_periods=1).sum() >= 2, 1, 0)

        # Informative for STAKE/FIAT and COIN/FIAT on default timeframe, only relevant if stake currency is BTC or ETH
        if self.config['stake_currency'] in ('BTC', 'ETH'):
            coin, stake = metadata['pair'].split('/')
            fiat = self.custom_fiat
            coin_fiat = f"{coin}/{fiat}"
            stake_fiat = f"{stake}/{fiat}"

            # COIN/FIAT (e.g. XLM/USD) - timeframe
            coin_fiat_tf = self.dp.get_pair_dataframe(pair=coin_fiat,
                                                      timeframe=self.timeframe)
            dataframe[f"{fiat}_rsi"] = ta.RSI(coin_fiat_tf, timeperiod=14)

            # STAKE/FIAT (e.g. BTC/USD) - inf_timeframe
            stake_fiat_tf = self.dp.get_pair_dataframe(
                pair=stake_fiat, timeframe=self.timeframe)
            stake_fiat_inf_tf = self.dp.get_pair_dataframe(
                pair=stake_fiat, timeframe=self.inf_timeframe)

            dataframe[f"{stake}_rsi"] = ta.RSI(stake_fiat_tf, timeperiod=14)
            dataframe[f"{stake}_rmi_{self.inf_timeframe}"] = RMI(
                stake_fiat_inf_tf, length=21, mom=5)

        # Informative indicators for current pair on inf_timeframe
        informative = self.dp.get_pair_dataframe(pair=metadata['pair'],
                                                 timeframe=self.inf_timeframe)
        informative['rsi'] = ta.RSI(informative, timeperiod=14)

        informative['1d_high'] = informative['close'].rolling(24).max()
        informative['3d_low'] = informative['close'].rolling(72).min()
        informative['adr'] = informative['1d_high'] - informative['3d_low']

        dataframe = merge_informative_pair(dataframe,
                                           informative,
                                           self.timeframe,
                                           self.inf_timeframe,
                                           ffill=True)

        return dataframe

    """
    Buy Trigger Signals
    """

    def populate_buy_trend(self, dataframe: DataFrame,
                           metadata: dict) -> DataFrame:
        params = self.get_pair_params(metadata['pair'], 'buy')
        trade_data = self.custom_trade_info[metadata['pair']]
        conditions = []

        # Persist a buy signal for existing trades to make use of ignore_roi_if_buy_signal = True;
        # when this buy signal is not present, a sell can happen according to the defined ROI table
        if trade_data['active_trade']:
            # peak_profit factor f(x)=1-x/400, rmi 30 -> 0.925, rmi 80 -> 0.80
            profit_factor = (1 - (dataframe['rmi-slow'].iloc[-1] / 400))
            # grow from 30 -> 70 after 720 minutes starting after 180 minutes
            rmi_grow = self.linear_growth(30, 70, 180, 720,
                                          trade_data['open_minutes'])

            conditions.append(dataframe['rmi-up-trend'] == 1)
            conditions.append(
                trade_data['current_profit'] > (trade_data['peak_profit'] *
                                                profit_factor))
            conditions.append(dataframe['rmi-slow'] >= rmi_grow)

        # Normal buy triggers that apply to new trades we want to enter
        else:
            # Primary buy triggers
            conditions.append(
                # "buy the dip" based buy signal using momentum pinball and downward RMI
                (dataframe['close'] <=
                 dataframe[f"3d_low_{self.inf_timeframe}"] +
                 (params['inf-pct-adr'] *
                  dataframe[f"adr_{self.inf_timeframe}"]))
                & (dataframe[f"rsi_{self.inf_timeframe}"] >= params['inf-rsi'])
                & (dataframe['rmi-dn-trend'] == 1)
                & (dataframe['rmi-slow'] >= params['rmi-slow'])
                & (dataframe['rmi-fast'] <= params['rmi-fast'])
                & (dataframe['mp'] <= params['mp']))

            # If the stake is BTC or ETH apply additional conditions
            if self.config['stake_currency'] in ('BTC', 'ETH'):
                # default timeframe conditions
                conditions.append((
                    dataframe[f"{self.config['stake_currency']}_rsi"] <
                    params['xtf-stake-rsi'])
                                  | (dataframe[f"{self.custom_fiat}_rsi"] >
                                     params['xtf-fiat-rsi']))
                # informative timeframe conditions
                conditions.append(dataframe[
                    f"{self.config['stake_currency']}_rmi_{self.inf_timeframe}"]
                                  < params['xinf-stake-rmi'])

        # Anything below here applies to persisting and new buy signal
        conditions.append(dataframe['volume'].gt(0))

        if conditions:
            dataframe.loc[reduce(lambda x, y: x & y, conditions), 'buy'] = 1

        return dataframe

    """
    Sell Trigger Signals
    """

    def populate_sell_trend(self, dataframe: DataFrame,
                            metadata: dict) -> DataFrame:
        params = self.get_pair_params(metadata['pair'], 'sell')
        trade_data = self.custom_trade_info[metadata['pair']]
        conditions = []

        # In this strategy all sells for profit happen according to ROI
        # This sell signal is designed only as a "dynamic stoploss"

        # if we are in an active trade for this pair
        if trade_data['active_trade']:
            # grow from -0.03 -> 0 after 300 minutes starting immediately
            loss_cutoff = self.linear_growth(-0.03, 0, 0, 300,
                                             trade_data['open_minutes'])

            # if we are at a loss, consider what the trend looks like and preempt the stoploss
            conditions.append((trade_data['current_profit'] < loss_cutoff)
                              & (trade_data['current_profit'] > self.stoploss)
                              & (dataframe['rmi-dn-trend'] == 1)
                              & (dataframe['volume'].gt(0)))

            # if the peak profit was positive at some point but never reached ROI, set a higher cross point for exit
            if trade_data['peak_profit'] > 0:
                conditions.append(
                    qtpylib.crossed_below(dataframe['rmi-slow'], 50))
            # if the trade was always negative, the bounce we expected didn't happen
            else:
                conditions.append(
                    qtpylib.crossed_below(dataframe['rmi-slow'], 10))

            # if there are other open trades in addition to this one, consider the average profit
            # across them all and how many free slots we have in our sell decision
            if trade_data['other_trades']:
                if trade_data['free_slots'] > 0:
                    """
                    Less free slots, more willing to sell
                    1 / free_slots * x = 
                    1 slot = 1/1 * -0.04 = -0.04 -> only allow sells if avg_other_proift above -0.04
                    4 slot = 1/4 * -0.04 = -0.01 -> only allow sells is avg_other_profit above -0.01
                    """
                    max_market_down = -0.04
                    hold_pct = (1 / trade_data['free_slots']) * max_market_down
                    conditions.append(
                        trade_data['avg_other_profit'] >= hold_pct)
                else:
                    # if we're out of slots, allow the biggest losing trade to sell regardless of avg profit
                    conditions.append(trade_data['biggest_loser'])

        # Impossible condition, so no sell signal fires when the bot looks here and there's no active trade
        else:
            conditions.append(dataframe['volume'].lt(0))

        if conditions:
            dataframe.loc[reduce(lambda x, y: x & y, conditions), 'sell'] = 1

        return dataframe

    """
    Super Legit Custom Methods
    """

    # Populate trades_data from the database
    def populate_trades(self, pair: str) -> dict:
        # Initialize the trades dict if it doesn't exist, persist it otherwise
        if pair not in self.custom_trade_info:
            self.custom_trade_info[pair] = {}

        # init the temp dicts and set the trade stuff to false
        trade_data = {}
        trade_data['active_trade'] = trade_data['other_trades'] = trade_data[
            'biggest_loser'] = False
        self.custom_trade_info['meta'] = {}

        # active trade stuff only works in live and dry, not backtest
        if self.config['runmode'].value in ('live', 'dry_run'):

            # find out if we have an open trade for this pair
            active_trade = Trade.get_trades([
                Trade.pair == pair,
                Trade.is_open.is_(True),
            ]).all()

            # if so, get some information
            if active_trade:
                # get current price and update the min/max rate
                current_rate = self.get_current_price(pair, True)
                active_trade[0].adjust_min_max_rates(current_rate)

                # get how long the trade has been open in minutes and candles
                present = arrow.utcnow()
                trade_start = arrow.get(active_trade[0].open_date)
                open_minutes = (present -
                                trade_start).total_seconds() // 60  # floor

                # set up the things we use in the strategy
                trade_data['active_trade'] = True
                trade_data['current_profit'] = active_trade[
                    0].calc_profit_ratio(current_rate)
                trade_data['peak_profit'] = max(
                    0, active_trade[0].calc_profit_ratio(
                        active_trade[0].max_rate))
                trade_data['open_minutes']: int = open_minutes
                trade_data['open_candles']: int = (
                    open_minutes // active_trade[0].timeframe)  # floor
            else:
                trade_data['current_profit'] = trade_data['peak_profit'] = 0.0
                trade_data['open_minutes'] = trade_data['open_candles'] = 0

            # if there are open trades not including the current pair, get some information
            # future reference, for *all* open trades: open_trades = Trade.get_open_trades()
            other_trades = Trade.get_trades([
                Trade.pair != pair,
                Trade.is_open.is_(True),
            ]).all()

            if other_trades:
                trade_data['other_trades'] = True
                other_profit = tuple(
                    trade.calc_profit_ratio(
                        self.get_current_price(trade.pair, False))
                    for trade in other_trades)
                trade_data['avg_other_profit'] = mean(other_profit)
                # find which of our trades is the biggest loser
                if trade_data['current_profit'] < min(other_profit):
                    trade_data['biggest_loser'] = True
            else:
                trade_data['avg_other_profit'] = 0

            # get the number of free trade slots, storing it in every pair's dict due to laziness
            open_trades = len(Trade.get_open_trades())
            trade_data['free_slots'] = max(
                0, self.config['max_open_trades'] - open_trades)

        return trade_data

    # Get the current price from the exchange (or cache)
    def get_current_price(self, pair: str, refresh: bool) -> float:
        if not refresh:
            rate = self.custom_current_price_cache.get(pair)
            # Use the cached rate if it has not yet expired
            if rate:
                return rate

        ask_strategy = self.config.get('ask_strategy', {})
        if ask_strategy.get('use_order_book', False):
            ob = self.dp.orderbook(pair, 1)
            rate = ob[f"{ask_strategy['price_side']}s"][0][0]
        else:
            ticker = self.dp.ticker(pair)
            rate = ticker['last']

        self.custom_current_price_cache[pair] = rate
        return rate

    # linear growth, starts at X and grows to Y after A minutes (starting after B minutes)
    # f(t) = X + (rate * t), where t = minutes since B and rate = (Y - X) / (A - B)
    def linear_growth(self, start: float, end: float, start_time: int,
                      end_time: int, trade_time: int) -> float:
        time = max(0, trade_time - start_time)
        rate = (end - start) / (end_time - start_time)
        return min(end, start + (rate * time))

    """
    Allow for buy/sell override parameters per pair. Testing, might remove.
    TODO:
        If good: make this more robust so you never have to edit this method.
        Consider: per-pair ROI if it seems worthwhile?
    """

    def get_pair_params(self, pair: str, side: str) -> Dict:
        buy_params = self.buy_params
        sell_params = self.sell_params

        ### Stake: USD
        if pair in ('ABC/XYZ', 'DEF/XYZ'):
            buy_params = self.buy_params_GROUP1
            sell_params = self.sell_params_GROUP1
        elif pair in ('QRD/WTF',):
            buy_params = self.buy_params_QRD
            sell_params = self.sell_params_QRD

        if side == 'sell':
            return sell_params

        return buy_params

    """
    Price protection on trade entry and timeouts, built-in Freqtrade functionality
    https://www.freqtrade.io/en/latest/strategy-advanced/
    """

    def check_buy_timeout(self, pair: str, trade: Trade, order: dict,
                          **kwargs) -> bool:
        bid_strategy = self.config.get('bid_strategy', {})
        ob = self.dp.orderbook(pair, 1)
        current_price = ob[f"{bid_strategy['price_side']}s"][0][0]
        # Cancel buy order if price is more than 1% above the order.
        if current_price > order['price'] * 1.01:
            return True
        return False

    def check_sell_timeout(self, pair: str, trade: Trade, order: dict,
                           **kwargs) -> bool:
        ask_strategy = self.config.get('ask_strategy', {})
        ob = self.dp.orderbook(pair, 1)
        current_price = ob[f"{ask_strategy['price_side']}s"][0][0]
        # Cancel sell order if price is more than 1% below the order.
        if current_price < order['price'] * 0.99:
            return True
        return False

    def confirm_trade_entry(self, pair: str, order_type: str, amount: float,
                            rate: float, time_in_force: str, **kwargs) -> bool:
        bid_strategy = self.config.get('bid_strategy', {})
        ob = self.dp.orderbook(pair, 1)
        current_price = ob[f"{bid_strategy['price_side']}s"][0][0]
        # Reject the trade entry if the current price is more than 1% above the proposed rate.
        if current_price > rate * 1.01:
            return False
        return True
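
A minimal, self-contained sketch of the read-through cache used by get_current_price above: the 5-minute TTLCache is consulted first, and the exchange is only queried on a miss or a forced refresh (fetch_last_price is a hypothetical stand-in for the dataprovider call):

from cachetools import TTLCache

price_cache: TTLCache = TTLCache(maxsize=100, ttl=300)  # 5 minutes


def fetch_last_price(pair: str) -> float:
    # Placeholder for self.dp.ticker(pair)['last'] or the orderbook lookup.
    return 50000.0


def get_current_price(pair: str, refresh: bool = False) -> float:
    if not refresh:
        rate = price_cache.get(pair)
        if rate is not None:
            return rate
    rate = fetch_last_price(pair)
    price_cache[pair] = rate
    return rate
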
Example #21
from SaitamaRobot import (
    DEL_CMDS,
    DEV_USERS,
    DRAGONS,
    SUPPORT_CHAT,
    DEMONS,
    TIGERS,
    WOLVES,
    dispatcher,
)

from telegram import Chat, ChatMember, ParseMode, Update
from telegram.ext import CallbackContext

# stores admemes in memory for 10 min.
ADMIN_CACHE = TTLCache(maxsize=512, ttl=60 * 10, timer=perf_counter)
THREAD_LOCK = RLock()


def is_whitelist_plus(chat: Chat, user_id: int, member: ChatMember = None) -> bool:
    return any(user_id in user for user in [WOLVES, TIGERS, DEMONS, DRAGONS, DEV_USERS])


def is_support_plus(chat: Chat, user_id: int, member: ChatMember = None) -> bool:
    return user_id in DEMONS or user_id in DRAGONS or user_id in DEV_USERS


def is_sudo_plus(chat: Chat, user_id: int, member: ChatMember = None) -> bool:
    return user_id in DRAGONS or user_id in DEV_USERS
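
Unlike the earlier LEGEND snippet, ADMIN_CACHE here passes timer=perf_counter. TTLCache accepts any zero-argument clock and measures ttl in that clock's units; the library default is already a monotonic clock, and perf_counter is simply another monotonic timer with finer resolution. A minimal sketch:

from time import perf_counter

from cachetools import TTLCache

ADMIN_CACHE = TTLCache(maxsize=512, ttl=60 * 10, timer=perf_counter)
ADMIN_CACHE["chat:-1001"] = [111, 222]
print(ADMIN_CACHE.get("chat:-1001"))  # [111, 222] until ten minutes elapse on the chosen clock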

Example #22
File: data.py Project: akshay7194/gs-quant
class GsDataApi(DataApi):
    __definitions = {}
    __asset_coordinates_cache = TTLCache(10000, 86400)
    DEFAULT_SCROLL = '30s'

    # DataApi interface

    @classmethod
    def query_data(cls, query: Union[DataQuery, MDAPIDataQuery], dataset_id: str = None,
                   asset_id_type: Union[GsIdType, str] = None) \
            -> Union[MDAPIDataBatchResponse, DataQueryResponse, tuple]:
        if isinstance(query, MDAPIDataQuery) and query.market_data_coordinates:
            # Don't use MDAPIDataBatchResponse for now - it doesn't handle quoting style correctly
            results: Union[MDAPIDataBatchResponse,
                           dict] = cls.execute_query('coordinates', query)
            if isinstance(results, dict):
                return results.get('responses', ())
            else:
                return results.responses if results.responses is not None else ()
        elif isinstance(query, DataQuery) and query.where:
            where = query.where.as_dict() if isinstance(
                query.where, FieldFilterMap) else query.where
            xref_keys = set(where.keys()).intersection(XRef.properties())
            if xref_keys:
                # Check that assetId is a symbol dimension of this data set. If not, we need to do a separate query
                # to resolve xref --> assetId
                if len(xref_keys) > 1:
                    raise MqValueError(
                        'Cannot specify more than one type of asset identifier'
                    )

                definition = cls.get_definition(dataset_id)

                sd = definition.dimensions.symbolDimensions
                if definition.parameters.symbolStrategy == 'MDAPI' or (
                        'assetId' not in sd and 'gsid' not in sd):
                    xref_type = min(xref_keys)
                    if asset_id_type is None:
                        asset_id_type = xref_type

                    xref_values = where[asset_id_type]
                    xref_values = (xref_values, ) if isinstance(
                        xref_values, str) else xref_values
                    asset_id_map = GsAssetApi.map_identifiers(
                        xref_type, GsIdType.id, xref_values)

                    if len(asset_id_map) != len(xref_values):
                        raise MqValueError(
                            'Not all {} were resolved to asset Ids'.format(
                                asset_id_type))

                    setattr(query.where, xref_type, None)
                    query.where.assetId = [
                        asset_id_map[x] for x in xref_values
                    ]

        response: Union[DataQueryResponse,
                        dict] = cls.execute_query(dataset_id, query)

        results = cls.get_results(dataset_id, response, query)

        if asset_id_type not in {GsIdType.id, None}:
            asset_ids = tuple(
                set(filter(None, (r.get('assetId') for r in results))))
            if asset_ids:
                xref_map = GsAssetApi.map_identifiers(GsIdType.id,
                                                      asset_id_type, asset_ids)

                if len(xref_map) != len(asset_ids):
                    raise MqValueError(
                        'Not all asset Ids were resolved to {}'.format(
                            asset_id_type))

                for result in results:
                    result[asset_id_type] = xref_map[result['assetId']]

        return results

    @staticmethod
    def execute_query(dataset_id: str, query: Union[DataQuery,
                                                    MDAPIDataQuery]):
        return GsSession.current._post('/data/{}/query'.format(dataset_id),
                                       payload=query)

    @staticmethod
    def get_results(dataset_id: str, response: Union[DataQueryResponse, dict],
                    query: DataQuery) -> list:
        if isinstance(response, dict):
            total_pages = response.get('totalPages')
            results = response.get('data', ())
        else:
            total_pages = response.total_pages if response.total_pages is not None else 0
            results = response.data if response.data is not None else ()

        if total_pages:
            if query.page is None:
                query.page = total_pages - 1
                results = results + GsDataApi.get_results(
                    dataset_id, GsDataApi.execute_query(dataset_id, query),
                    query)
            elif query.page - 1 > 0:
                query.page -= 1
                results = results + GsDataApi.get_results(
                    dataset_id, GsDataApi.execute_query(dataset_id, query),
                    query)
            else:
                return results

        return results

    @classmethod
    def last_data(cls,
                  query: Union[DataQuery, MDAPIDataQuery],
                  dataset_id: str = None) -> Union[list, tuple]:
        if getattr(query, 'marketDataCoordinates', None):
            result = GsSession.current._post('/data/coordinates/query/last',
                                             payload=query)
            return result.get('responses', ())
        else:
            result = GsSession.current._post(
                '/data/{}/last/query'.format(dataset_id), payload=query)
            return result.get('data', ())

    @classmethod
    def symbol_dimensions(cls, dataset_id: str) -> tuple:
        definition = cls.get_definition(dataset_id)
        return definition.dimensions.symbolDimensions

    @classmethod
    def time_field(cls, dataset_id: str) -> str:
        definition = cls.get_definition(dataset_id)
        return definition.dimensions.timeField

    # GS-specific functionality

    @classmethod
    def get_coverage(cls,
                     dataset_id: str,
                     scroll: str = DEFAULT_SCROLL,
                     scroll_id: Optional[str] = None,
                     limit: int = None,
                     offset: int = None,
                     fields: List[str] = None,
                     include_history: bool = False) -> List[dict]:
        params = {}
        if scroll:
            params['scroll'] = scroll

        if scroll_id:
            params['scrollId'] = scroll_id

        if not limit:
            limit = 4000
        params['limit'] = limit

        if offset:
            params['offset'] = offset

        if fields:
            params['fields'] = fields

        if include_history:
            params['includeHistory'] = 'true'

        body = GsSession.current._get('/data/{}/coverage'.format(dataset_id),
                                      payload=params)
        results = body['results']
        if len(results) > 0 and 'scrollId' in body:
            return results + cls.get_coverage(dataset_id,
                                              scroll_id=body['scrollId'],
                                              scroll=GsDataApi.DEFAULT_SCROLL,
                                              limit=limit)
        else:
            return results

    @classmethod
    def create(cls, definition: Union[DataSetEntity, dict]) -> DataSetEntity:
        result = GsSession.current._post('/data/datasets', payload=definition)
        return result

    @classmethod
    def update_definition(
            cls, dataset_id: str, definition: Union[DataSetEntity,
                                                    dict]) -> DataSetEntity:
        result = GsSession.current._put('/data/datasets/{}'.format(dataset_id),
                                        payload=definition,
                                        cls=DataSetEntity)
        return result

    @classmethod
    def upload_data(cls, dataset_id: str, data: Union[pd.DataFrame, list,
                                                      tuple]) -> dict:
        result = GsSession.current._post('/data/{}'.format(dataset_id),
                                         payload=data)
        return result

    @classmethod
    def get_definition(cls, dataset_id: str) -> DataSetEntity:
        definition = cls.__definitions.get(dataset_id)
        if not definition:
            definition = GsSession.current._get(
                '/data/datasets/{}'.format(dataset_id), cls=DataSetEntity)
            if not definition:
                raise MqValueError('Unknown dataset {}'.format(dataset_id))

            cls.__definitions[dataset_id] = definition

        return definition

    @classmethod
    def get_many_definitions(
            cls,
            limit: int = 100,
            dataset_id: str = None,
            owner_id: str = None,
            name: str = None,
            mq_symbol: str = None) -> Tuple[DataSetEntity, ...]:

        query_string = urlencode(
            dict(
                filter(
                    lambda item: item[1] is not None,
                    dict(id=dataset_id,
                         ownerId=owner_id,
                         name=name,
                         mqSymbol=mq_symbol,
                         limit=limit).items())))

        res = GsSession.current._get(
            '/data/datasets?{query}'.format(query=query_string),
            cls=DataSetEntity)['results']
        return res

    @classmethod
    @cachetools.cached(__asset_coordinates_cache)
    def get_many_coordinates(
        cls,
        mkt_type: str = None,
        mkt_asset: str = None,
        mkt_class: str = None,
        mkt_point: Tuple[str, ...] = (),
        *,
        limit: int = 100,
        return_type: type = str,
    ) -> Union[Tuple[str, ...], Tuple[MarketDataCoordinate, ...]]:
        where = FieldFilterMap(
            mkt_type=mkt_type.upper() if mkt_type is not None else None,
            mkt_asset=mkt_asset.upper() if mkt_asset is not None else None,
            mkt_class=mkt_class.upper() if mkt_class is not None else None,
        )
        for index, point in enumerate(mkt_point):
            setattr(where, 'mkt_point' + str(index + 1), point.upper())

        query = EntityQuery(where=where, limit=limit)
        results = GsSession.current._post('/data/mdapi/query',
                                          query)['results']

        if return_type is str:
            return tuple(coordinate['name'] for coordinate in results)
        elif return_type is MarketDataCoordinate:
            return tuple(
                MarketDataCoordinate(
                    mkt_type=coordinate['dimensions']['mktType'],
                    mkt_asset=coordinate['dimensions']['mktAsset'],
                    mkt_class=coordinate['dimensions']['mktClass'],
                    mkt_point=tuple(coordinate['dimensions']
                                    ['mktPoint'].values()),
                    mkt_quoting_style=coordinate['dimensions']
                    ['mktQuotingStyle']) for coordinate in results)
        else:
            raise NotImplementedError('Unsupported return type')

    @staticmethod
    def build_market_data_query(asset_ids: List[str],
                                query_type: QueryType,
                                where: Union[FieldFilterMap, Dict] = None,
                                source: Union[str] = None,
                                real_time: bool = False):
        inner = {
            'assetIds': asset_ids,
            'queryType': query_type.value,
            'where': where or {},
            'source': source or 'any',
            'frequency': 'Real Time' if real_time else 'End Of Day',
            'measures': ['Curve']
        }
        if real_time:
            inner['startTime'] = DataContext.current.start_time
            inner['endTime'] = DataContext.current.end_time
        else:
            inner['startDate'] = DataContext.current.start_date
            inner['endDate'] = DataContext.current.end_date
        return {'queries': [inner]}

    @classmethod
    def get_data_providers(cls, entity_id: str) -> Dict:
        """Return daily and real-time data providers

        :param entity_id: identifier of entity i.e. asset, country, subdivision
        :return: dictionary of available data providers

        **Usage**

        Return a dictionary containing a set of dataset providers for each available data field.
        For each field will return a dict of daily and real-time dataset providers where available.
        """

        GsSession.current: GsSession
        body = GsSession.current._get(
            f'/data/measures/{entity_id}/availability')
        if 'errorMessages' in body:
            raise MqValueError(
                f"data availablity request {body['requestId']} failed: {body.get('errorMessages', '')}"
            )
        if 'data' not in body:
            providers = {}
        else:
            providers = {}

            all_data_mappings = sorted(body['data'],
                                       key=lambda x: x['rank'],
                                       reverse=True)

            for source in all_data_mappings:

                freq = source.get('frequency', 'End Of Day')
                dataset_field = source.get('datasetField', '')
                rank = source.get('rank')

                providers.setdefault(dataset_field, {})

                if rank:
                    if freq == 'End Of Day':
                        providers[dataset_field][
                            DataFrequency.DAILY] = source['datasetId']
                    elif freq == 'Real Time':
                        providers[dataset_field][
                            DataFrequency.REAL_TIME] = source['datasetId']

        return providers

    @classmethod
    def get_market_data(cls, query) -> pd.DataFrame:
        GsSession.current: GsSession
        body = GsSession.current._post('/data/markets', payload=query)
        container = body['responses'][0]['queryResponse'][0]
        if 'errorMessages' in container:
            raise MqValueError(
                f"market data request {body['requestId']} failed: {container['errorMessages']}"
            )
        if 'response' not in container:
            df = MarketDataResponseFrame()
        else:
            df = MarketDataResponseFrame(container['response']['data'])
            df.set_index('date' if 'date' in df.columns else 'time',
                         inplace=True)
            df.index = pd.to_datetime(df.index)
        df.dataset_ids = tuple(container.get('dataSetIds', ()))
        return df

    @classmethod
    def __normalise_coordinate_data(
        cls,
        data: Iterable[Union[MDAPIDataQueryResponse, Dict]],
        fields: Optional[Tuple[MDAPIQueryField, ...]] = None
    ) -> Iterable[Iterable[Dict]]:
        ret = []
        for response in data:
            coord_data = []
            rows = (r.as_dict() for r in response.data) if isinstance(
                response, MDAPIDataQueryResponse) else response.get(
                    'data', ())

            for pt in rows:
                if not pt:
                    continue

                if not fields and 'value' not in pt:
                    value_field = pt['mktQuotingStyle']
                    pt['value'] = pt.pop(value_field)

                coord_data.append(pt)
            ret.append(coord_data)

        return ret

    @classmethod
    def __df_from_coordinate_data(
            cls,
            data: Iterable[Dict],
            *,
            use_datetime_index: Optional[bool] = True) -> pd.DataFrame:
        df = cls._sort_coordinate_data(pd.DataFrame.from_records(data))
        index_field = next((f for f in ('time', 'date') if f in df.columns),
                           None)
        if index_field and use_datetime_index:
            df = df.set_index(pd.DatetimeIndex(df.loc[:, index_field].values))

        return df

    @classmethod
    def _sort_coordinate_data(
        cls,
        df: pd.DataFrame,
        by: Tuple[str] = ('date', 'time', 'mktType', 'mktAsset', 'mktClass',
                          'mktPoint', 'mktQuotingStyle', 'value')
    ) -> pd.DataFrame:
        columns = df.columns
        field_order = [f for f in by if f in columns]
        field_order.extend(f for f in columns if f not in field_order)
        return df[field_order]

    @classmethod
    def _coordinate_from_str(cls, coordinate_str: str) -> MarketDataCoordinate:
        tmp = coordinate_str.rsplit(".", 1)
        dimensions = tmp[0].split("_")
        if len(dimensions) < 2:
            raise MqValueError('invalid coordinate ' + coordinate_str)

        kwargs = {
            'mkt_type': dimensions[0],
            'mkt_asset': dimensions[1] or None,
            'mkt_quoting_style': tmp[-1] if len(tmp) > 1 else None
        }

        if len(dimensions) > 2:
            kwargs['mkt_class'] = dimensions[2] or None

        if len(dimensions) > 3:
            kwargs['mkt_point'] = tuple(dimensions[3:]) or None

        return MarketDataCoordinate(**kwargs)

    @classmethod
    def coordinates_last(
        cls,
        coordinates: Union[Iterable[str], Iterable[MarketDataCoordinate]],
        as_of: Union[dt.datetime, dt.date] = None,
        vendor: MarketDataVendor = MarketDataVendor.Goldman_Sachs,
        as_dataframe: bool = False,
        pricing_location: Optional[PricingLocation] = None
    ) -> Union[Dict, pd.DataFrame]:
        """
        Get last value of coordinates data

        :param coordinates: market data coordinate(s)
        :param as_of: snapshot date or time
        :param vendor: data vendor
        :param as_dataframe: whether to return the result as Dataframe
        :param pricing_location: the location where close data has been recorded (not used for real-time query)
        :return: Dataframe or dictionary of the returned data

        **Examples**

        >>> coordinate = ("FX Fwd_USD/EUR_Fwd Pt_2y",)
        >>> data = GsDataApi.coordinates_last(coordinate, dt.datetime(2019, 11, 19))
        """
        market_data_coordinates = tuple(
            cls._coordinate_from_str(coord) if isinstance(coord, str
                                                          ) else coord
            for coord in coordinates)
        query = cls.build_query(
            end=as_of,
            market_data_coordinates=market_data_coordinates,
            vendor=vendor,
            pricing_location=pricing_location)

        data = cls.last_data(query)

        if not as_dataframe:
            ret = {coordinate: None for coordinate in market_data_coordinates}
            for idx, row in enumerate(cls.__normalise_coordinate_data(data)):
                try:
                    ret[market_data_coordinates[idx]] = row[0]['value']
                except IndexError:
                    ret[market_data_coordinates[idx]] = None
            return ret

        ret = []
        for idx, row in enumerate(cls.__normalise_coordinate_data(data)):
            coordinate_as_dict = market_data_coordinates[idx].as_dict(
                as_camel_case=True)
            try:
                ret.append(
                    dict(
                        chain(coordinate_as_dict.items(),
                              (('value', row[0]['value']),
                               ('time', row[0]['time'])))))
            except IndexError:
                ret.append(
                    dict(
                        chain(coordinate_as_dict.items(),
                              (('value', None), ('time', None)))))
        return cls.__df_from_coordinate_data(ret, use_datetime_index=False)

    @classmethod
    def coordinates_data(
            cls,
            coordinates: Union[str, MarketDataCoordinate, Iterable[str],
                               Iterable[MarketDataCoordinate]],
            start: Union[dt.datetime, dt.date] = None,
            end: Union[dt.datetime, dt.date] = None,
            vendor: MarketDataVendor = MarketDataVendor.Goldman_Sachs,
            as_multiple_dataframes: bool = False,
            pricing_location: Optional[PricingLocation] = None,
            fields: Optional[Tuple[MDAPIQueryField, ...]] = None,
            **kwargs) -> Union[pd.DataFrame, Tuple[pd.DataFrame]]:
        """
        Get coordinates data

        :param coordinates: market data coordinate(s)
        :param start: start date or time
        :param end: end date or time
        :param vendor: data vendor
        :param as_multiple_dataframes: whether to return the result as one or multiple Dataframe(s)
        :param pricing_location: the location where close data has been recorded (not used for real-time query)
        :param fields: value fields to return
        :param kwargs: Extra query arguments
        :return: Dataframe(s) of the returned data

        **Examples**

        >>> coordinate = ("FX Fwd_USD/EUR_Fwd Pt_2y",)
        >>> data = GsDataApi.coordinates_data(coordinate, dt.datetime(2019, 11, 18), dt.datetime(2019, 11, 19))
        """
        coordinates_iterable = (coordinates, ) if isinstance(
            coordinates, (MarketDataCoordinate, str)) else coordinates
        query = cls.build_query(market_data_coordinates=tuple(
            cls._coordinate_from_str(coord) if isinstance(coord, str
                                                          ) else coord
            for coord in coordinates_iterable),
                                vendor=vendor,
                                start=start,
                                end=end,
                                pricing_location=pricing_location,
                                fields=fields,
                                **kwargs)

        results = cls.__normalise_coordinate_data(cls.query_data(query),
                                                  fields=fields)

        if as_multiple_dataframes:
            return tuple(
                GsDataApi.__df_from_coordinate_data(r) for r in results)
        else:
            return cls.__df_from_coordinate_data(chain.from_iterable(results))

    @classmethod
    def coordinates_data_series(
            cls,
            coordinates: Union[str, MarketDataCoordinate, Iterable[str],
                               Iterable[MarketDataCoordinate]],
            start: Union[dt.datetime, dt.date] = None,
            end: Union[dt.datetime, dt.date] = None,
            vendor: MarketDataVendor = MarketDataVendor.Goldman_Sachs,
            pricing_location: Optional[PricingLocation] = None,
            **kwargs) -> Union[pd.Series, Tuple[pd.Series]]:
        """
        Get coordinates data series

        :param coordinates: market data coordinate(s)
        :param start: start date or time
        :param end: end date or time
        :param vendor: data vendor
        :param pricing_location: the location where close data has been recorded (not used for real-time query)
        :param kwargs: Extra query arguments
        :return: Series of the returned data

        **Examples**

        >>> coordinate = ("FX Fwd_USD/EUR_Fwd Pt_2y",)
        >>> data = GsDataApi.coordinates_data_series(coordinate, dt.datetime(2019, 11, 18), dt.datetime(2019, 11, 19))
        """
        dfs = cls.coordinates_data(coordinates,
                                   start=start,
                                   end=end,
                                   pricing_location=pricing_location,
                                   vendor=vendor,
                                   as_multiple_dataframes=True,
                                   **kwargs)

        ret = tuple(pd.Series() if df.empty else pd.
                    Series(index=df.index, data=df.value.values) for df in dfs)
        if isinstance(coordinates, (MarketDataCoordinate, str)):
            return ret[0]
        else:
            return ret

    @staticmethod
    @cachetools.cached(TTLCache(ttl=3600, maxsize=128))
    def get_types(dataset_id: str):
        results = GsSession.current._get(f'/data/catalog/{dataset_id}')
        fields = results.get("fields")
        if fields:
            field_types = {}
            for key, value in fields.items():
                field_type = value.get('type')
                field_format = value.get('format')
                field_types[key] = field_format or field_type
            return field_types
        raise RuntimeError(f"Unable to get Dataset schema for {dataset_id}")

    @classmethod
    def construct_dataframe_with_types(
            cls, dataset_id: str, data: Union[Base, List,
                                              Tuple]) -> pd.DataFrame:
        """
        Constructs a dataframe with correct date types.
        :param dataset_id: id of the dataset
        :param data: data to convert with correct types
        :return: dataframe with correct types
        """
        if len(data):
            dataset_types = cls.get_types(dataset_id)
            df = pd.DataFrame(data)

            for field_name, type_name in dataset_types.items():
                if df.get(field_name) is not None and type_name in (
                        'date', 'date-time'):
                    df = df.astype({field_name: numpy.datetime64})

            field_names = dataset_types.keys()

            if 'date' in field_names:
                df = df.set_index('date')
            elif 'time' in field_names:
                df = df.set_index('time')

            return df
        else:
            return pd.DataFrame({})
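
get_many_coordinates above is wrapped with @classmethod on the outside and cachetools.cached on the inside, sharing a single class-level TTLCache. The default key hashes every argument, including cls, so all arguments must be hashable, which is why mkt_point is passed as a tuple. A minimal sketch of the pattern with hypothetical names:

import cachetools


class CoordinateApi:
    _cache = cachetools.TTLCache(maxsize=10000, ttl=86400)

    @classmethod
    @cachetools.cached(_cache)
    def get_many_coordinates(cls, mkt_type: str = None, mkt_point: tuple = ()) -> tuple:
        # Placeholder for the backend query; all arguments (and cls) form the cache key.
        print("hitting the backend")
        return (f"{mkt_type}_{'_'.join(mkt_point)}",)


CoordinateApi.get_many_coordinates("FX Fwd", ("2y",))  # hits the backend
CoordinateApi.get_many_coordinates("FX Fwd", ("2y",))  # cached for up to a day
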
Example #23
import requests
import csv
from cachetools import cached, TTLCache
from app.utils import date as date_util

from . import countrycodes as cc
"""
Base URL for fetching data.
"""
base_url = 'https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-%s.csv'


@cached(cache=TTLCache(maxsize=1024, ttl=3600))
def get_data(category):
    """
    Retrieves the data for the provided type. The data is cached for 1 hour.
    """

    # Adhere to category naming standard.
    category = category.lower().capitalize()

    # Request the data
    request = requests.get(base_url % category)
    text = request.text

    # Parse the CSV.
    data = list(csv.DictReader(text.splitlines()))

    # The normalized locations.
    locations = []
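
Keeping the TTLCache in a named variable, rather than constructing it inline in the decorator as above, makes it possible to inspect or clear the cache manually. A minimal sketch under that assumption (the download is a placeholder, not the CSSE fetch above):

from cachetools import TTLCache, cached

data_cache = TTLCache(maxsize=1024, ttl=3600)


@cached(cache=data_cache)
def get_data(category: str) -> str:
    print(f"downloading {category} data")
    return f"rows for {category}"


get_data("confirmed")  # downloads
get_data("confirmed")  # cached for up to an hour
data_cache.clear()     # manual invalidation
get_data("confirmed")  # downloads again
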
Example #24
specific language governing permissions and limitations under the License.
"""

from cachetools import cached, TTLCache
from cachetools.keys import hashkey

from django.utils.translation import ugettext_lazy as _

from pipeline.component_framework.models import ComponentModel

from plugin_service.plugin_client import PluginServiceApiClient
from plugin_service import env
from gcloud.analysis_statistics.models import TemplateNodeStatistics


@cached(cache=TTLCache(maxsize=1024, ttl=60), key=hashkey)
def get_remote_plugin_name(limit=100, offset=0):
    """
    @summary: fetch the names of third-party plugins
    @param: limit: page size used when fetching third-party plugin data, defaults to 100
    @param: offset: pagination offset used when fetching third-party plugins, defaults to 0
    return plugin_info: dict of third-party plugin info
    """
    TOTAL = limit
    CUR_TOTAL = 0

    plugin_info = {}
    if not env.USE_PLUGIN_SERVICE == "1":
        return plugin_info
    while CUR_TOTAL < TOTAL:
        result = PluginServiceApiClient.get_paas_plugin_info(
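
A small caveat on key=hashkey as used above: the key is built from the arguments exactly as passed, so the same logical call made positionally and with keywords occupies two separate cache entries. A minimal sketch demonstrating this:

from cachetools import TTLCache, cached
from cachetools.keys import hashkey

calls = []


@cached(cache=TTLCache(maxsize=1024, ttl=60), key=hashkey)
def get_plugins(limit=100, offset=0):
    calls.append((limit, offset))
    return {"count": limit}


get_plugins(100, 0)
get_plugins(limit=100, offset=0)
print(len(calls))  # 2 -- the two call styles hash to different keys
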
Example #25
import pandas as pd
import streamlit as st
import urllib.request, json
import area as ar
from cachetools import cached, TTLCache

plz_name = None
cases = None


# Streamlit cache doesn't work here
@cached(cache=TTLCache(maxsize=1, ttl=60 * 60 * 1))
def load_data():
    #PLZ to Name Mapping
    plz_name = dict({})
    plz_area = dict({})
    districts = pd.read_csv('Districts.tsv', header=None, sep='\t')

    plz_district = dict([(str(i), str(v))
                         for i, v in zip(districts[0], districts[2])])
    with urllib.request.urlopen(
            "https://raw.githubusercontent.com/openZH/covid_19/master/fallzahlen_plz/PLZ_gen_epsg4326_F_KTZH_2020.json"
    ) as url:
        data = json.loads(url.read().decode())
        for feature in data['features']:
            plz = (feature['properties']['PLZ'])
            name = feature['properties']['Ortschaftsname']
            area = ar.area(feature['geometry']) / 1e6
            if plz is not None and name is not None:
                plz_str = str(int(plz))
                plz_name[plz_str] = str(name)
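
Since load_data above takes no arguments, the cache can only ever hold a single entry, so maxsize=1 with a one-hour ttl effectively means "recompute at most once per hour". A minimal sketch of that zero-argument loader pattern (the payload is a placeholder):

from cachetools import TTLCache, cached


@cached(cache=TTLCache(maxsize=1, ttl=60 * 60))
def load_data() -> dict:
    print("rebuilding lookup tables")
    return {"8001": "Zurich"}


load_data()  # builds the mapping
load_data()  # served from the cache for the next hour
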
Example #26
File: base.py Project: Catstyle/applied
    def __init__(self, ttl):
        self.ttl = ttl
        self.values = TTLCache(maxsize=1024, ttl=ttl / 1000)
class BinanceAPIManager:
    def __init__(self, config: Config, db: Database, logger: Logger):
        # initializing the client class calls `ping` API endpoint, verifying the connection
        self.binance_client = Client(
            config.BINANCE_API_KEY,
            config.BINANCE_API_SECRET_KEY,
            tld=config.BINANCE_TLD,
        )
        self.db = db
        self.logger = logger
        self.config = config

        self.cache = BinanceCache()
        self.stream_manager: Optional[BinanceStreamManager] = None
        self.setup_websockets()

    def setup_websockets(self):
        self.stream_manager = BinanceStreamManager(
            self.cache,
            self.config,
            self.binance_client,
            self.logger,
        )

    @cached(cache=TTLCache(maxsize=1, ttl=43200))
    def get_trade_fees(self) -> Dict[str, float]:
        return {
            ticker["symbol"]: ticker["taker"]
            for ticker in self.binance_client.get_trade_fee()["tradeFee"]
        }

    @cached(cache=TTLCache(maxsize=1, ttl=60))
    def get_using_bnb_for_fees(self):
        return self.binance_client.get_bnb_burn_spot_margin()["spotBNBBurn"]

    def get_fee(self, origin_coin: Coin, target_coin: Coin, selling: bool):
        fees = self.get_trade_fees()
        if not fees:
            base_fee = 0.001
        else:
            base_fee = fees[origin_coin + target_coin]

        if not self.get_using_bnb_for_fees():
            return base_fee

        # The discount is only applied if we have enough BNB to cover the fee
        amount_trading = (self._sell_quantity(origin_coin.symbol,
                                              target_coin.symbol)
                          if selling else self._buy_quantity(
                              origin_coin.symbol, target_coin.symbol))

        fee_amount = amount_trading * base_fee * 0.75
        if origin_coin.symbol == "BNB":
            fee_amount_bnb = fee_amount
        else:
            origin_price = self.get_ticker_price(origin_coin + Coin("BNB"))
            if origin_price is None:
                return base_fee
            fee_amount_bnb = fee_amount * origin_price

        bnb_balance = self.get_currency_balance("BNB")

        if bnb_balance >= fee_amount_bnb:
            return base_fee * 0.75
        return base_fee

    def get_account(self):
        """
        Get account information
        """
        return self.binance_client.get_account()

    def get_ticker_price(self, ticker_symbol: str):
        """
        Get ticker price of a specific coin
        """
        price = self.cache.ticker_values.get(ticker_symbol, None)
        if price is None and ticker_symbol not in self.cache.non_existent_tickers:
            self.cache.ticker_values = {
                ticker["symbol"]: float(ticker["price"])
                for ticker in self.binance_client.get_symbol_ticker()
            }
            self.logger.debug(
                f"Fetched all ticker prices: {self.cache.ticker_values}")
            price = self.cache.ticker_values.get(ticker_symbol, None)
            if price is None:
                self.logger.debug(
                    f"Ticker does not exist: {ticker_symbol} - will not be fetched from now on"
                )
                self.cache.non_existent_tickers.add(ticker_symbol)

        return price

    def get_currency_balance(self, currency_symbol: str, force=False) -> float:
        """
        Get balance of a specific coin
        """
        with self.cache.open_balances() as cache_balances:
            balance = cache_balances.get(currency_symbol, None)
            if force or balance is None:
                cache_balances.clear()
                cache_balances.update({
                    currency_balance["asset"]: float(currency_balance["free"])
                    for currency_balance in self.binance_client.get_account()
                    ["balances"]
                })
                self.logger.debug(f"Fetched all balances: {cache_balances}")
                if currency_symbol not in cache_balances:
                    cache_balances[currency_symbol] = 0.0
                    return 0.0
                return cache_balances.get(currency_symbol, 0.0)

            return balance

    def retry(self, func, *args, **kwargs):
        time.sleep(1)
        attempts = 0
        while attempts < 20:
            try:
                return func(*args, **kwargs)
            except Exception:  # pylint: disable=broad-except
                self.logger.warning(
                    f"Failed to Buy/Sell. Trying Again (attempt {attempts}/20)"
                )
                if attempts == 0:
                    self.logger.warning(traceback.format_exc())
                attempts += 1
        return None

    def get_symbol_filter(self, origin_symbol: str, target_symbol: str,
                          filter_type: str):
        return next(_filter for _filter in self.binance_client.get_symbol_info(
            origin_symbol + target_symbol)["filters"]
                    if _filter["filterType"] == filter_type)

    @cached(cache=TTLCache(maxsize=2000, ttl=43200))
    def get_alt_tick(self, origin_symbol: str, target_symbol: str):
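        # Derive the number of decimal places allowed for the quantity from the
        # LOT_SIZE stepSize string, e.g. "0.00100000" -> 3, "1.00000000" -> 0.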
        step_size = self.get_symbol_filter(origin_symbol, target_symbol,
                                           "LOT_SIZE")["stepSize"]
        if step_size.find("1") == 0:
            return 1 - step_size.find(".")
        return step_size.find("1") - 1

    @cached(cache=TTLCache(maxsize=2000, ttl=43200))
    def get_min_notional(self, origin_symbol: str, target_symbol: str):
        return float(
            self.get_symbol_filter(origin_symbol, target_symbol,
                                   "MIN_NOTIONAL")["minNotional"])

    def _wait_for_order(
        self, order_id, origin_symbol: str, target_symbol: str
    ) -> Optional[BinanceOrder]:  # pylint: disable=unsubscriptable-object
        while True:
            order_status: BinanceOrder = self.cache.orders.get(order_id, None)
            if order_status is not None:
                break
            self.logger.debug(f"Waiting for order {order_id} to be created")
            time.sleep(1)

        self.logger.debug(f"Order created: {order_status}")

        while order_status.status != "FILLED":
            try:
                order_status = self.cache.orders.get(order_id, order_status)

                self.logger.debug(f"Waiting for order {order_id} to be filled")

                if self._should_cancel_order(order_status):
                    cancel_order = None
                    while cancel_order is None:
                        cancel_order = self.binance_client.cancel_order(
                            symbol=origin_symbol + target_symbol,
                            orderId=order_id)
                    self.logger.info("Order timeout, canceled...")

                    # sell partially
                    if order_status.status == "PARTIALLY_FILLED" and order_status.side == "BUY":
                        self.logger.info("Sell partially filled amount")

                        order_quantity = self._sell_quantity(
                            origin_symbol, target_symbol)
                        partially_order = None
                        while partially_order is None:
                            partially_order = self.binance_client.order_market_sell(
                                symbol=origin_symbol + target_symbol,
                                quantity=order_quantity)

                    self.logger.info("Going back to scouting mode...")
                    return None

                if order_status.status == "CANCELED":
                    self.logger.info(
                        "Order is canceled, going back to scouting mode...")
                    return None

                time.sleep(1)
            except BinanceAPIException as e:
                self.logger.info(e)
                time.sleep(1)
            except Exception as e:  # pylint: disable=broad-except
                self.logger.info(f"Unexpected order error: {e}")
                time.sleep(1)

        self.logger.debug(f"Order filled: {order_status}")

        return order_status

    def wait_for_order(
        self, order_id, origin_symbol: str, target_symbol: str,
        order_guard: OrderGuard
    ) -> Optional[BinanceOrder]:  # pylint: disable=unsubscriptable-object
        with order_guard:
            return self._wait_for_order(order_id, origin_symbol, target_symbol)

    def _should_cancel_order(self, order_status):
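        # order_status.time is the order timestamp in epoch milliseconds.
        # Cancel an order once it has been open longer than the configured
        # SELL_TIMEOUT / BUY_TIMEOUT (in minutes). A partially filled buy is only
        # cancelled if the price has since moved more than 0.1% above the order price.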
        minutes = (time.time() - order_status.time / 1000) / 60
        timeout = 0

        if order_status.side == "SELL":
            timeout = float(self.config.SELL_TIMEOUT)
        else:
            timeout = float(self.config.BUY_TIMEOUT)

        if timeout and minutes > timeout and order_status.status == "NEW":
            return True

        if timeout and minutes > timeout and order_status.status == "PARTIALLY_FILLED":
            if order_status.side == "SELL":
                return True

            if order_status.side == "BUY":
                current_price = self.get_ticker_price(order_status.symbol)
                if float(current_price) * (1 - 0.001) > float(
                        order_status.price):
                    return True

        return False

    def buy_alt(self, origin_coin: Coin, target_coin: Coin) -> BinanceOrder:
        return self.retry(self._buy_alt, origin_coin, target_coin)

    def _buy_quantity(self,
                      origin_symbol: str,
                      target_symbol: str,
                      target_balance: float = None,
                      from_coin_price: float = None):
        target_balance = target_balance or self.get_currency_balance(
            target_symbol)
        from_coin_price = from_coin_price or self.get_ticker_price(
            origin_symbol + target_symbol)

        origin_tick = self.get_alt_tick(origin_symbol, target_symbol)
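        # e.g. with 100 USDT available, a price of 25 and a 3-decimal tick:
        # floor(100 * 10**3 / 25) / 10**3 = 4.0 units of the origin coin.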
        return math.floor(target_balance * 10**origin_tick /
                          from_coin_price) / float(10**origin_tick)

    def _buy_alt(self, origin_coin: Coin, target_coin: Coin):
        """
        Buy altcoin
        """
        trade_log = self.db.start_trade_log(origin_coin, target_coin, False)
        origin_symbol = origin_coin.symbol
        target_symbol = target_coin.symbol

        with self.cache.open_balances() as balances:
            balances.clear()

        origin_balance = self.get_currency_balance(origin_symbol)
        target_balance = self.get_currency_balance(target_symbol)
        from_coin_price = self.get_ticker_price(origin_symbol + target_symbol)

        order_quantity = self._buy_quantity(origin_symbol, target_symbol,
                                            target_balance, from_coin_price)
        self.logger.info(f"Buying roughly {order_quantity} {origin_symbol}")

        # Try to buy until successful
        order = None
        order_guard = self.stream_manager.acquire_order_guard()
        while order is None:
            try:
                order = self.binance_client.order_limit_buy(
                    symbol=origin_symbol + target_symbol,
                    quantity=order_quantity,
                    price=from_coin_price,
                )
                self.logger.debug(order)
            except BinanceAPIException as e:
                self.logger.info(e)
                time.sleep(1)
            except Exception as e:  # pylint: disable=broad-except
                self.logger.warning(f"Unexpected Error: {e}")

        order_id = order["orderId"]
        self.logger.info(
            f"Placed buy order {order_id}, waiting for it to complete")
        trade_log.set_ordered(origin_balance, target_balance, order_quantity)

        order_guard.set_order(origin_symbol, target_symbol, int(order_id))
        order = self.wait_for_order(order_id, origin_symbol, target_symbol,
                                    order_guard)

        if order is None:
            return None

        new_balance = self.get_currency_balance(origin_symbol)
        self.logger.info(f"Bought {new_balance} {origin_symbol}")

        trade_log.set_complete(order.cumulative_quote_qty)

        return order

    def sell_alt(self, origin_coin: Coin, target_coin: Coin) -> BinanceOrder:
        return self.retry(self._sell_alt, origin_coin, target_coin)

    def _sell_quantity(self,
                       origin_symbol: str,
                       target_symbol: str,
                       origin_balance: float = None):
        origin_balance = origin_balance or self.get_currency_balance(
            origin_symbol)

        origin_tick = self.get_alt_tick(origin_symbol, target_symbol)
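        # e.g. a balance of 1.23456 with a 3-decimal tick becomes
        # floor(1.23456 * 10**3) / 10**3 = 1.234.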
        return math.floor(origin_balance * 10**origin_tick) / float(
            10**origin_tick)

    def _sell_alt(self, origin_coin: Coin, target_coin: Coin):
        """
        Sell altcoin
        """
        trade_log = self.db.start_trade_log(origin_coin, target_coin, True)
        origin_symbol = origin_coin.symbol
        target_symbol = target_coin.symbol

        with self.cache.open_balances() as balances:
            balances.clear()

        origin_balance = self.get_currency_balance(origin_symbol)
        target_balance = self.get_currency_balance(target_symbol)
        from_coin_price = self.get_ticker_price(origin_symbol + target_symbol)

        order_quantity = self._sell_quantity(origin_symbol, target_symbol,
                                             origin_balance)
        self.logger.info(f"Selling {order_quantity} {origin_symbol}")

        self.logger.debug(f"Balance is {origin_balance}")
        order = None
        order_guard = self.stream_manager.acquire_order_guard()
        while order is None:
            # Place a limit sell at the previously fetched price so value is not lost to slippage
            self.logger.debug("Attempting to place order")
            order = self.binance_client.order_limit_sell(
                symbol=origin_symbol + target_symbol,
                quantity=order_quantity,
                price=from_coin_price)
            self.logger.debug(f"order: {order}")
        order_id = order["orderId"]

        self.logger.info(
            f"Placed sell order {order_id}, waiting for it to complete")

        trade_log.set_ordered(origin_balance, target_balance, order_quantity)

        order_guard.set_order(origin_symbol, target_symbol, int(order_id))
        order = self.wait_for_order(order_id, origin_symbol, target_symbol,
                                    order_guard)

        if order is None:
            return None

        new_balance = self.get_currency_balance(origin_symbol)
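        # Wait until the exchange reflects the reduced origin balance before reading
        # the new target balance (force=True bypasses the local balance cache).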
        while new_balance >= origin_balance:
            new_balance = self.get_currency_balance(origin_symbol, True)

        new_target_balance = self.get_currency_balance(target_symbol)
        self.logger.info(
            f"Sold {origin_symbol} for {new_target_balance} {target_symbol}")

        trade_log.set_complete(order.cumulative_quote_qty)

        return order
Example #28
load_dotenv()
TOKEN = os.getenv('TOKEN', None)
if TOKEN is None:
    print('Set TOKEN="YOUR_TOKEN" in the .env file')
    sys.exit(1)

game = discord.Game("BROWSING NANO DOCS")

description = '''NanoDocs BOT - Will send info upon request'''
bot = commands.Bot(command_prefix='#', description=description)
bot.remove_command("help")
datenow = datetime.datetime.now()

# 1 hour cache for RPC
cache = TTLCache(maxsize=10, ttl=3600)


@bot.event
async def on_ready():
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')
    print('NanoDocs Online')
    print(datenow)
    await bot.change_presence(status=discord.Status.online, activity=game)


def loadRPCdescr():
    try:
Example #29
    async def get(self, loc_id):  # pylint: disable=arguments-differ
        # Get location at the index equal to provided id.
        locations = await self.get_all()
        return locations[loc_id]


# ---------------------------------------------------------------


# Base URL for fetching category.
BASE_URL = (
    "https://raw.githubusercontent.com/CSSEGISandData/2019-nCoV/master/csse_covid_19_data/csse_covid_19_time_series/"
)


@cached(cache=TTLCache(maxsize=128, ttl=1800))
async def get_category(category):
    """
    Retrieves the data for the provided category. The data is cached for 30 minutes locally, 1 hour via shared Redis.

    :returns: The data for category.
    :rtype: dict
    """
    # Adhere to category naming standard.
    category = category.lower()
    data_id = f"jhu.{category}"

    # check shared cache
    cache_results = await check_cache(data_id)
    if cache_results:
        LOGGER.info(f"{data_id} using shared cache results")
Example #30
File: helpers.py  Project: apluslms/a-plus
            roman += letters[i]
            number -= numbers[i]
    return roman


def settings_text(key):
    def get(name):
        if hasattr(settings, name):
            return getattr(settings, name)
        return None

    return get('{}_{}'.format(
        key, (get_language() or settings.LANGUAGE_CODE).upper())) or get(key)


@cached(TTLCache(100, ttl=30))
def get_url_ip_address_list(url):
    """
    This function takes a full URL as a parameter and returns the IP addresses
    of the host as a string.

    It will cache results for 30 seconds, so repeated calls return fast
    """
    hostname = urlsplit(url).hostname
    assert hostname, "Invalid url: no hostname found"
    ips = (a[4][0] for a in socket.getaddrinfo(
        hostname, None, 0, socket.SOCK_STREAM, socket.IPPROTO_TCP))
    return tuple(set(ips))
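# Usage sketch (hypothetical host and output): get_url_ip_address_list("https://example.com/page")
# would return a tuple of the resolved IPs, e.g. ("93.184.216.34",), cached for 30 seconds.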


def get_remote_addr(request):