Example No. 1
    def _get_cached_asset_movements(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        end_at_least_ts: Timestamp,
    ) -> List[AssetMovement]:
        """
        Attetmps to read the cache of asset movements and returns a list of them.

        If there is a problem can raise HistoryCacheInvalid
        """
        assetmovementsfile_path = os.path.join(
            self.user_directory,
            ASSETMOVEMENTS_HISTORYFILE,
        )
        asset_movements_contents = get_jsonfile_contents_or_empty_dict(
            FilePath(assetmovementsfile_path),
        )
        asset_movements_history_is_okay = data_up_todate(
            asset_movements_contents,
            start_ts,
            end_at_least_ts,
        )
        if not asset_movements_history_is_okay:
            raise HistoryCacheInvalid('Asset Movements cache is invalid')

        try:
            asset_movements = asset_movements_from_dictlist(
                asset_movements_contents['data'],
                start_ts,
                end_ts,
            )
        except KeyError:
            raise HistoryCacheInvalid('Asset Movements cache is invalid')

        return asset_movements
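
Since the docstring documents the HistoryCacheInvalid raise, a caller would wrap this in a try/except and fall back to a fresh query when the cache is invalid. A minimal sketch of such a caller, where the history object stands in for an instance of this class and the fallback is hypothetical:

now = ts_now()
try:
    movements = history._get_cached_asset_movements(
        start_ts=Timestamp(0),
        end_ts=now,
        end_at_least_ts=now,
    )
except HistoryCacheInvalid:
    movements = []  # hypothetical fallback: re-query the exchanges instead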
Example No. 2
    def __init__(self, args: argparse.Namespace) -> None:
        user_path = FilePath(
            os.path.join(str(default_data_directory()), args.user_name))
        self.db = DBHandler(
            user_data_dir=user_path,
            password=args.user_password,
            msg_aggregator=MessagesAggregator(),
        )
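
This constructor only reads args.user_name and args.user_password from the namespace. A minimal parser producing such a namespace could look as follows; the flag names are assumptions, only the two attribute names are dictated by the code above:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--user-name', dest='user_name', required=True)
parser.add_argument('--user-password', dest='user_password', required=True)
args = parser.parse_args()  # yields args.user_name / args.user_password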
Example No. 3
    def __init__(self, data_directory: FilePath) -> None:
        self.prefix = 'https://min-api.cryptocompare.com/data/'
        self.data_directory = data_directory
        self.price_history: Dict[PairCacheKey, PriceHistoryData] = dict()
        self.price_history_file: Dict[PairCacheKey, FilePath] = dict()

        # Check the data folder and remember the filenames of any cached history
        prefix = os.path.join(self.data_directory, 'price_history_')
        # escape backslashes (Windows paths) so the regex treats them literally
        prefix = prefix.replace('\\', '\\\\')
        regex = re.compile(prefix + r'(.*)\.json')
        files_list = glob.glob(prefix + '*.json')

        for file_ in files_list:
            # undo the doubled backslashes that glob keeps from the pattern
            file_ = FilePath(file_.replace('\\\\', '\\'))
            match = regex.match(file_)
            assert match
            cache_key = PairCacheKey(match.group(1))
            self.price_history_file[cache_key] = file_
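
The cache filenames follow the price_history_<PAIR>.json pattern that Example No. 11 below constructs, with the pair key being <from>_<to>. A quick, self-contained check of the key extraction on POSIX paths, where no backslash escaping is needed:

import os
import re

prefix = os.path.join('/tmp/data', 'price_history_')
regex = re.compile(prefix + r'(.*)\.json')
match = regex.match('/tmp/data/price_history_BTC_EUR.json')
assert match is not None
assert match.group(1) == 'BTC_EUR'  # this string becomes the PairCacheKey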
Example No. 4
def default_data_directory() -> FilePath:
    home = os.path.expanduser("~")
    data_directory = os.path.join(home, '.rotkehlchen')
    try:
        os.makedirs(data_directory)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise

    return FilePath(data_directory)
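
The errno.EEXIST check predates os.makedirs(..., exist_ok=True), available since Python 3.2. An equivalent, shorter sketch using that flag (FilePath is the same NewType the example uses):

import os

def default_data_directory() -> FilePath:
    data_directory = os.path.join(os.path.expanduser('~'), '.rotkehlchen')
    os.makedirs(data_directory, exist_ok=True)  # no error if it already exists
    return FilePath(data_directory)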
Example No. 5
def accounting_data_dir(use_clean_caching_directory, tmpdir_factory) -> FilePath:
    """For accounting we have a dedicated test data dir so that it's easy to
    cache the results of the historic price queries also in Travis"""
    if use_clean_caching_directory:
        return FilePath(tmpdir_factory.mktemp('accounting_data'))

    home = os.path.expanduser("~")
    if 'TRAVIS' in os.environ:
        data_directory = os.path.join(home, '.cache', '.rotkehlchen-test-dir')
    else:
        data_directory = os.path.join(home, '.rotkehlchen', 'tests_data_directory')

    try:
        os.makedirs(data_directory)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise

    return FilePath(data_directory)
Example No. 6
    def __init__(self, data_directory: FilePath, database: Optional[DBHandler]) -> None:
        super().__init__(database=database, service_name=ExternalService.CRYPTOCOMPARE)
        self.data_directory = data_directory
        self.price_history: Dict[PairCacheKey, PriceHistoryData] = {}
        self.price_history_file: Dict[PairCacheKey, FilePath] = {}
        self.session = requests.session()
        self.session.headers.update({'User-Agent': 'rotkehlchen'})

        # Check the data folder and remember the filenames of any cached history
        prefix = os.path.join(self.data_directory, 'price_history_')
        # escape backslashes (Windows paths) so the regex treats them literally
        prefix = prefix.replace('\\', '\\\\')
        regex = re.compile(prefix + r'(.*)\.json')
        files_list = glob.glob(prefix + '*.json')

        for file_ in files_list:
            # undo the doubled backslashes that glob keeps from the pattern
            file_ = FilePath(file_.replace('\\\\', '\\'))
            match = regex.match(file_)
            assert match
            cache_key = PairCacheKey(match.group(1))
            self.price_history_file[cache_key] = file_
Example No. 7
    def create_files(self, dirpath: Path) -> Tuple[bool, str]:
        if not self.create_csv:
            return True, ''

        try:
            if not dirpath.exists():
                os.makedirs(dirpath)

            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_TRADES_CSV)),
                self.trades_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_LOAN_PROFITS_CSV)),
                self.loan_profits_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_ASSET_MOVEMENTS_CSV)),
                self.asset_movements_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_GAS_CSV)),
                self.tx_gas_costs_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_MARGIN_CSV)),
                self.margin_positions_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_LOAN_SETTLEMENTS_CSV)),
                self.loan_settlements_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_DEFI_EVENTS_CSV)),
                self.defi_events_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, FILENAME_ALL_CSV)),
                self.all_events_csv,
            )
        except PermissionError as e:
            return False, str(e)

        return True, ''
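
The _dict_to_csv_file helper is not shown in these examples. A plausible sketch of what such a helper does, writing a list of row dicts that share the same keys to a CSV file; this is an assumption, not the project's actual implementation:

import csv
from typing import Any, Dict, List

def _dict_to_csv_file(path: str, dictionary_list: List[Dict[str, Any]]) -> None:
    # nothing to write if there are no rows
    if len(dictionary_list) == 0:
        return

    with open(path, 'w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=dictionary_list[0].keys())
        writer.writeheader()
        writer.writerows(dictionary_list)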
Example No. 8
    def compress_and_encrypt_db(self, password: str) -> Tuple[bytes, str]:
        """Decrypt the DB, dump it in a temporary plaintext db, compress it,
        and then re-encrypt it

        Returns the encrypted blob along with a b64-encoded sha256 hash of
        the original plaintext data"""
        log.info('Compress and encrypt DB')
        with tempfile.TemporaryDirectory() as tmpdirname:
            tempdb = FilePath(os.path.join(tmpdirname, 'temp.db'))
            self.db.export_unencrypted(tempdb)
            with open(tempdb, 'rb') as f:
                data_blob = f.read()

        original_data_hash = base64.b64encode(
            hashlib.sha256(data_blob).digest(),
        ).decode()
        compressed_data = zlib.compress(data_blob, level=9)
        encrypted_data = encrypt(password.encode(), compressed_data)

        return encrypted_data.encode(), original_data_hash
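
The inverse direction follows from the same steps in reverse: decrypt, decompress, verify the sha256 hash. A sketch assuming a decrypt() counterpart to the encrypt() helper used above; its existence and signature are assumptions:

import base64
import hashlib
import zlib

def decrypt_and_decompress_db(encrypted_data: bytes, password: str, expected_hash: str) -> bytes:
    compressed_data = decrypt(password.encode(), encrypted_data)  # assumed counterpart of encrypt()
    data_blob = zlib.decompress(compressed_data)
    data_hash = base64.b64encode(hashlib.sha256(data_blob).digest()).decode()
    assert data_hash == expected_hash, 'DB payload hash mismatch'
    return data_blob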
Example No. 9
    def unlock(
        self,
        username: str,
        password: str,
        create_new: bool,
    ) -> FilePath:
        user_data_dir = FilePath(os.path.join(self.data_directory, username))
        if create_new:
            if os.path.exists(user_data_dir):
                raise AuthenticationError(
                    'User {} already exists'.format(username))
            else:
                os.mkdir(user_data_dir)
        else:
            if not os.path.exists(user_data_dir):
                raise AuthenticationError(
                    'User {} does not exist'.format(username))

            if not os.path.exists(os.path.join(user_data_dir,
                                               'rotkehlchen.db')):
                # This is bad. User directory exists but database is missing.
                # Make a backup of the directory that user should probably remove
                # on their own. At the same time delete the directory so that a new
                # user account can be created
                shutil.move(
                    user_data_dir,
                    os.path.join(
                        self.data_directory,
                        f'auto_backup_{username}_{ts_now()}',
                    ),
                )

                raise AuthenticationError(
                    'User {} exists but DB is missing. Somehow must have been manually '
                    'deleted or is corrupt. Please recreate the user account. '
                    'A backup of the user directory was created.'.format(
                        username))

        self.db: DBHandler = DBHandler(user_data_dir, password,
                                       self.msg_aggregator)
        self.user_data_dir = user_data_dir
        self.logged_in = True
        self.username = username
        return user_data_dir
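
A hypothetical caller of unlock, creating the account on first use and logging in otherwise (the data_handler instance and credentials are stand-ins):

try:
    user_dir = data_handler.unlock('alice', 'secret', create_new=True)
except AuthenticationError:
    # the user already exists, so log in instead
    user_dir = data_handler.unlock('alice', 'secret', create_new=False)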
Example No. 10
    def create_files(self, dirpath: FilePath) -> Tuple[bool, str]:
        if not self.create_csv:
            return True, ''

        try:
            if not os.path.exists(dirpath):
                os.makedirs(dirpath)

            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, 'trades.csv')),
                self.trades_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, 'loan_profits.csv')),
                self.loan_profits_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, 'asset_movements.csv')),
                self.asset_movements_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, 'tx_gas_costs.csv')),
                self.tx_gas_costs_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, 'margin_positions.csv')),
                self.margin_positions_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, 'loan_settlements.csv')),
                self.loan_settlements_csv,
            )
            _dict_to_csv_file(
                FilePath(os.path.join(dirpath, 'all_events.csv')),
                self.all_events_csv,
            )
        except (PermissionError, OSError) as e:
            return False, str(e)

        return True, ''
Example No. 11
    def get_historical_data(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
        historical_data_start: Timestamp,
    ) -> List[PriceHistoryEntry]:
        """
        Get historical price data from cryptocompare

        Returns a sorted list of price entries.
        """
        log.debug(
            'Retrieving historical price data from cryptocompare',
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )

        cache_key = PairCacheKey(from_asset.identifier + '_' +
                                 to_asset.identifier)
        got_cached_value = self.got_cached_price(cache_key, timestamp)
        if got_cached_value:
            return self.price_history[cache_key].data

        now_ts = int(time.time())
        cryptocompare_hourquerylimit = 2000
        calculated_history: List = list()

        if historical_data_start <= timestamp:
            end_date = historical_data_start
        else:
            end_date = timestamp
        while True:
            pr_end_date = end_date
            end_date = Timestamp(end_date + cryptocompare_hourquerylimit * 3600)

            log.debug(
                'Querying cryptocompare for hourly historical price',
                from_asset=from_asset,
                to_asset=to_asset,
                cryptocompare_hourquerylimit=cryptocompare_hourquerylimit,
                end_date=end_date,
            )

            resp = self.query_endpoint_histohour(
                from_asset=from_asset,
                to_asset=to_asset,
                limit=2000,
                to_timestamp=end_date,
            )

            if pr_end_date != resp['TimeFrom']:
                # If we got more data than we needed (happens when we are close
                # to now_ts), skip the entries that are already included
                diff = pr_end_date - resp['TimeFrom']
                # If the start date has less than 3600 secs difference from previous
                # end date then do nothing. If it has more skip all already included entries
                if diff >= 3600:
                    if resp['Data'][diff // 3600]['time'] != pr_end_date:
                        raise ValueError(
                            'Expected to find the previous date timestamp during '
                            'cryptocompare historical data fetching', )
                    # just add only the part from the previous timestamp and on
                    resp['Data'] = resp['Data'][diff // 3600:]

            # The end date of a cryptocompare query may not match the requested
            # one. Since this is hourly historical data it can differ from the
            # requested end date by up to 3600 secs, but no more.
            end_dates_dont_match = (end_date < now_ts
                                    and resp['TimeTo'] != end_date)
            if end_dates_dont_match:
                if resp['TimeTo'] - end_date >= 3600:
                    raise ValueError(
                        'End dates do not match in a cryptocompare query')
                else:
                    # but if it's just a drift within an hour just update the end_date so that
                    # it can be picked up by the next iterations in the loop
                    end_date = resp['TimeTo']

            # If last time slot and first new are the same, skip the first new slot
            last_entry_equal_to_first = (len(calculated_history) != 0
                                         and calculated_history[-1]['time']
                                         == resp['Data'][0]['time'])
            if last_entry_equal_to_first:
                resp['Data'] = resp['Data'][1:]
            calculated_history += resp['Data']
            if end_date >= now_ts:
                break

        # Let's always check for data sanity for the hourly prices.
        assert _check_hourly_data_sanity(calculated_history, from_asset,
                                         to_asset)
        # and now since we actually queried the data let's also cache them
        filename = FilePath(
            os.path.join(self.data_directory, 'price_history_' + cache_key + '.json'),
        )
        log.info(
            'Updating price history cache',
            filename=filename,
            from_asset=from_asset,
            to_asset=to_asset,
        )
        write_history_data_in_file(
            data=calculated_history,
            filepath=filename,
            start_ts=historical_data_start,
            end_ts=now_ts,
        )

        # Finally save the objects in memory and return them
        data_including_time = {
            'data': calculated_history,
            'start_time': historical_data_start,
            'end_time': end_date,
        }
        self.price_history_file[cache_key] = filename
        self.price_history[cache_key] = _dict_history_to_data(
            data_including_time)

        return self.price_history[cache_key].data
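
The loop pages through the histohour endpoint in 2000-hour windows, i.e. 2000 * 3600 = 7,200,000 seconds (roughly 83 days) per query. The paging arithmetic in isolation, ignoring the within-the-hour TimeTo drift handled above, so illustrative only:

import math

CRYPTOCOMPARE_HOUR_QUERY_LIMIT = 2000
WINDOW_SECS = CRYPTOCOMPARE_HOUR_QUERY_LIMIT * 3600  # 7,200,000 secs per query

def num_histohour_calls(span_secs: int) -> int:
    # the loop always runs at least once, then once per full window
    return max(math.ceil(span_secs / WINDOW_SECS), 1)

# e.g. two years of hourly data: ceil(63,072,000 / 7,200,000) = 9 queries
assert num_histohour_calls(2 * 365 * 24 * 3600) == 9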
Example No. 12
def user_data_dir(data_dir, username) -> FilePath:
    """Create and return the user data directory"""
    user_data_dir = os.path.join(data_dir, username)
    if not os.path.exists(user_data_dir):
        os.mkdir(user_data_dir)
    return FilePath(user_data_dir)
Example No. 13
def session_data_dir(tmpdir_factory) -> FilePath:
    return FilePath(tmpdir_factory.mktemp('session_data'))
Example No. 14
def data_dir(tmpdir_factory) -> FilePath:
    return FilePath(tmpdir_factory.mktemp('data'))
Example No. 15
    def unlock(
        self,
        username: str,
        password: str,
        create_new: bool,
    ) -> FilePath:
        """Unlocks a user, either logging them in or creating a new user

        May raise:
        - SystemPermissionError if there are permission errors when accessing the DB
        or a directory in the user's filesystem
        - AuthenticationError if the given user does not exist, or if
        sqlcipher version problems are detected
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        """
        user_data_dir = FilePath(os.path.join(self.data_directory, username))
        if create_new:
            if os.path.exists(user_data_dir):
                raise AuthenticationError(
                    'User {} already exists'.format(username))
            else:
                try:
                    os.mkdir(user_data_dir)
                except PermissionError as e:
                    raise SystemPermissionError(
                        f'Failed to create directory for user: {str(e)}')

        else:
            if not os.path.exists(user_data_dir):
                raise AuthenticationError(
                    'User {} does not exist'.format(username))

            if not os.path.exists(os.path.join(user_data_dir,
                                               'rotkehlchen.db')):
                # This is bad. User directory exists but database is missing.
                # Make a backup of the directory that user should probably remove
                # on their own. At the same time delete the directory so that a new
                # user account can be created
                shutil.move(
                    user_data_dir,
                    os.path.join(
                        self.data_directory,
                        f'auto_backup_{username}_{ts_now()}',
                    ),
                )

                raise SystemPermissionError(
                    'User {} exists but DB is missing. Somehow must have been manually '
                    'deleted or is corrupt or access permissions do not allow reading. '
                    'Please recreate the user account. '
                    'A backup of the user directory was created.'.format(
                        username))

        self.db: DBHandler = DBHandler(user_data_dir, password,
                                       self.msg_aggregator)
        self.user_data_dir = user_data_dir
        self.logged_in = True
        self.username = username
        self.password = password
        return user_data_dir