Example no. 1
    def query_online_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AssetMovement]:
        """Query bittrex for closed deposits and withdrawals in the given time range.

        Queries both the deposits and withdrawals endpoints, deserializes each
        entry and keeps only the movements whose timestamp falls inside
        [start_ts, end_ts].
        """
        query_options: Dict[str, Union[str, int]] = {
            'pageSize': 200,  # max page size according to their docs
            'startDate': timestamp_to_iso8601(start_ts, utc_as_z=True),
            'endDate': timestamp_to_iso8601(end_ts, utc_as_z=True),
        }

        raw_data = []
        # query both endpoints with a fresh copy of the options, since
        # _paginated_api_query may mutate the given options dict
        for endpoint in ('deposits/closed', 'withdrawals/closed'):
            raw_data.extend(
                self._paginated_api_query(endpoint=endpoint, options=query_options.copy()),
            )
        log.debug('bittrex deposit/withdrawal history result',
                  results_num=len(raw_data))

        movements = []
        for entry in raw_data:
            deserialized = self._deserialize_asset_movement(entry)
            # the API can return entries outside the requested range; filter here
            if deserialized and start_ts <= deserialized.timestamp <= end_ts:
                movements.append(deserialized)

        return movements
Example no. 2
    def query_online_deposits_withdrawals(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> List[AssetMovement]:
        """Query IndependentReserve for deposits/withdrawals in the given time range.

        Iterates over all known account guids (populating them via a balance
        query if needed), pulls each account's transactions and deserializes
        the Deposit/Withdrawal entries into AssetMovements. Entries with
        unknown assets or deserialization problems are reported to the message
        aggregator and skipped.
        """
        # NOTE: the pylint disables previously attached to this signature
        # (no-self-use / unused-argument) were stale: self, start_ts and
        # end_ts are all used below, so they have been removed.
        if self.account_guids is None:
            self.query_balances()  # do a balance query to populate the account guids
        movements = []
        for guid in self.account_guids:  # type: ignore  # we know its not None
            try:
                resp = self._gather_paginated_data(
                    path='GetTransactions',
                    extra_options={
                        'accountGuid': guid,
                        'fromTimestampUtc': timestamp_to_iso8601(start_ts, utc_as_z=True),
                        'toTimestampUtc': timestamp_to_iso8601(end_ts, utc_as_z=True),
                        # if we filter by tx type in my tests I started getting
                        # {"Message":"A server error occurred. Please wait a few minutes and try again."}   # noqa: E501
                        # 'txTypes': 'Deposit,Withdrawal',  # there is also DepositFee
                    },
                )
            except KeyError as e:
                self.msg_aggregator.add_error(
                    f'Error processing IndependentReserve transactions response. '
                    f'Missing key: {str(e)}.',
                )
                return []

            for entry in resp:
                entry_type = entry.get('Type')
                # only deposits and withdrawals are of interest here
                if entry_type is None or entry_type not in ('Deposit', 'Withdrawal'):
                    continue

                try:
                    movement = _asset_movement_from_independentreserve(entry)
                    if movement:
                        movements.append(movement)
                except UnknownAsset as e:
                    self.msg_aggregator.add_warning(
                        f'Found unknown IndependentReserve asset {e.asset_name}. '
                        f'Ignoring the deposit/withdrawal containing it.',
                    )
                    continue
                except (DeserializationError, KeyError) as e:
                    msg = str(e)
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.msg_aggregator.add_error(
                        'Failed to deserialize an IndependentReserve deposit/withdrawal. '
                        'Check logs for details. Ignoring it.',
                    )
                    log.error(
                        'Error processing an IndependentReserve deposit/withdrawal.',
                        raw_asset_movement=entry,
                        error=msg,
                    )
                    continue

        return movements
Example no. 3
    def query_online_trade_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
            market: Optional[TradePair] = None,
    ) -> List[Trade]:
        """Query bittrex closed orders in the given range and turn them into trades.

        Optionally restricts the query to a single market. Entries that fail
        to deserialize are reported to the message aggregator and skipped.
        """
        query_options: Dict[str, Union[str, int]] = {
            'pageSize': 200,  # max page size according to their docs
            'startDate': timestamp_to_iso8601(start_ts, utc_as_z=True),
            'endDate': timestamp_to_iso8601(end_ts, utc_as_z=True),
        }
        if market is not None:
            query_options['marketSymbol'] = world_pair_to_bittrex(market)

        raw_data = self._paginated_api_query(endpoint='orders/closed', options=query_options)
        log.debug('bittrex order history result', results_num=len(raw_data))

        trades = []
        for entry in raw_data:
            try:
                trades.append(trade_from_bittrex(entry))
            except UnknownAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found bittrex trade with unknown asset '
                    f'{e.asset_name}. Ignoring it.',
                )
            except UnsupportedAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found bittrex trade with unsupported asset '
                    f'{e.asset_name}. Ignoring it.',
                )
            except UnprocessableTradePair as e:
                self.msg_aggregator.add_error(
                    f'Found bittrex trade with unprocessable pair '
                    f'{e.pair}. Ignoring it.',
                )
            except (DeserializationError, KeyError) as e:
                msg = f'Missing key entry for {str(e)}.' if isinstance(e, KeyError) else str(e)
                self.msg_aggregator.add_error(
                    'Error processing a bittrex trade. Check logs '
                    'for details. Ignoring it.',
                )
                log.error(
                    'Error processing a bittrex trade',
                    trade=entry,
                    error=msg,
                )

        return trades
Example no. 4
    def _generate_reports(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        report_type: Literal['fills', 'account'],
        tempdir: str,
    ) -> List[str]:
        """
        Generates all the reports to get historical data from coinbase.

        https://docs.pro.coinbase.com/#reports
        There are 2 type of reports.
        1. Fill reports which are per product id (market)
        2. Account reports which are per account id

        The fill reports have the following data format:
        portfolio,trade id,product,side,created at,size,size unit,price,fee,
        total,price/fee/total unit

        The account reports have the following data format:
        portfolio,type,time,amount,balance,amount/balance unit,transfer id,trade id,order id

        Returns a list of filepaths where the reports were written.

        - Raises the same exceptions as _api_query()
        - Can raise KeyError if the API does not return the expected response format.
        """
        start_date = timestamp_to_iso8601(start_ts)
        end_date = timestamp_to_iso8601(end_ts)

        if report_type == 'fills':
            account_or_product_ids = self._get_products_ids()
            identifier_key = 'product_id'
        else:
            account_or_product_ids = self._get_account_ids()
            identifier_key = 'account_id'

        # First queue one report per product/account at the server
        report_ids = []
        options = {
            'type': report_type,
            'start_date': start_date,
            'end_date': end_date,
            'format': 'csv',
            # The only way to disable emailing the report link is to give an invalid link
            'email': '*****@*****.**',
        }
        for identifier in account_or_product_ids:
            options[identifier_key] = identifier
            post_result = self._api_query('reports',
                                          request_method='POST',
                                          options=options)
            report_ids.append(post_result['id'])

        # At this point all reports must have been queued for creation at the server
        # Now wait until they are ready and pull them one by one
        report_paths = []
        last_change_ts = ts_now()
        while True:
            finished_ids_indices = []
            for idx, report_id in enumerate(report_ids):
                get_result = self._api_query(f'reports/{report_id}',
                                             request_method='GET')
                # Have to add assert here for mypy since the endpoint string is
                # a variable string and can't be overloaded and type checked
                assert isinstance(get_result, dict)
                if get_result['status'] != 'ready':
                    continue
                # a report is ready here so let's reset the timer
                last_change_ts = ts_now()
                file_url = get_result['file_url']
                # A timeout is needed here: without one a stalled download
                # would block forever and the SECS_TO_WAIT_FOR_REPORT
                # watchdog below would never get a chance to fire.
                response = requests.get(file_url, timeout=30)
                length = len(response.content)
                # empty fill reports have length of 95, empty account reports 85
                # So we assume a report of more than 100 chars has data.
                if length > 100:
                    log.debug(
                        f'Got a populated report for id: {report_id}. Writing it to disk'
                    )
                    filepath = os.path.join(tempdir, f'report_{report_id}.csv')
                    with open(filepath, 'wb') as f:
                        f.write(response.content)
                    report_paths.append(filepath)
                else:
                    log.debug(
                        f'Got report for id: {report_id} with length {length}. Skipping it'
                    )

                finished_ids_indices.append(idx)

            # Bail out if no report has become ready for too long
            if ts_now() - last_change_ts > SECS_TO_WAIT_FOR_REPORT:
                raise RemoteError(
                    f'There has been no response from CoinbasePro reports for over '
                    f' {MINS_TO_WAIT_FOR_REPORT} minutes. Bailing out.', )

            # Delete the report ids that have been downloaded. Note: reverse order
            # so that we don't mess up the indices
            for idx in reversed(finished_ids_indices):
                del report_ids[idx]

            # When there is no more ids to query break out of the loop
            if len(report_ids) == 0:
                break

        return report_paths