# Example #1
# 0
def libor(ctx, start_date, end_date, config, use_intermediate_panel,
          libor_csv_path, intermediate_panel_path, *args, **kwargs):
    """Scrape BCRA Libor rates for [start_date, end_date] and write them to CSV.

    Reads scraper settings from the config file, validates output paths and
    config keys, runs BCRALiborScraper, and writes the (reversed) rows to the
    configured CSV file. Configuration problems are reported via click.echo
    instead of raising.
    """
    validate_dates(start_date, end_date)
    # Normalize the (presumably datetime) inputs to plain dates.
    start_date = date(start_date.year, start_date.month, start_date.day)
    end_date = date(end_date.year, end_date.month, end_date.day)
    try:
        logging.basicConfig(level=logging.WARNING)
        config = read_config(file_path=config, command=ctx.command.name)
        libor_file_path = validate_file_path(libor_csv_path,
                                             config,
                                             file_path_key='libor_file_path')
        intermediate_panel_path = validate_file_path(
            intermediate_panel_path,
            config,
            file_path_key='intermediate_panel_path')

        # Both output paths must point at files, not directories.
        if os.path.isdir(libor_file_path):
            click.echo(
                'Error: el path ingresado para tasas libor es un directorio')
            exit()
        elif os.path.isdir(intermediate_panel_path):
            click.echo(
                'Error: el path ingresado para el panel intermedio es un directorio'
            )
            exit()

        ensure_dir_exists(os.path.split(intermediate_panel_path)[0])
        ensure_dir_exists(os.path.split(libor_file_path)[0])

        validate_url_config(config)
        validate_url_has_value(config)
        validate_libor_rates_config(config)
        validate_libor_rates_has_values(config)

        # 'timeout' is optional; use the idiomatic `key in dict` membership
        # test instead of `key in dict.keys()`.
        timeout = int(config['timeout']) if 'timeout' in config else None
        tries = int(config.get('tries', 1))

        scraper = BCRALiborScraper(
            url=config.get('url'),
            timeout=timeout,
            tries=tries,
            rates=config.get('rates'),
            use_intermediate_panel=use_intermediate_panel,
            intermediate_panel_path=intermediate_panel_path,
        )

        parsed = scraper.run(start_date, end_date)

        processed_header = scraper.preprocess_header(scraper.rates)
        # Rows are reversed before writing — presumably the scraper yields
        # newest-first and the CSV wants chronological order; confirm.
        parsed.reverse()
        write_file(processed_header, parsed, libor_file_path)

    except InvalidConfigurationError as err:
        click.echo(err)
# Example #2
# 0
def libor(ctx, start_date, end_date, config, skip_intermediate_panel_data, libor_csv_path,
          intermediate_panel_path, skip_clean_last_dates, *args, **kwargs):
    """Scrape BCRA Libor rates for [start_date, end_date] and write them to CSV.

    Reads scraper settings from the config file, validates output paths and
    config keys, runs BCRALiborScraper, writes the rows, and sends a
    validation email with the execution time window. Configuration problems
    are reported via click.echo instead of raising.
    """
    validate_dates(start_date, end_date)
    # Inputs are datetimes; the scraper works with plain dates.
    start_date = start_date.date()
    end_date = end_date.date()

    try:
        execution_start_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        logging.basicConfig(level=logging.WARNING)
        config = read_config(file_path=config, command=ctx.command.name)
        libor_file_path = validate_file_path(libor_csv_path, config, file_path_key='libor_file_path')
        intermediate_panel_path = validate_file_path(intermediate_panel_path, config, file_path_key='intermediate_panel_path')

        # Both output paths must point at files, not directories.
        if os.path.isdir(libor_file_path):
            click.echo('Error: el path ingresado para tasas libor es un directorio')
            exit()
        elif os.path.isdir(intermediate_panel_path):
            click.echo('Error: el path ingresado para el panel intermedio es un directorio')
            exit()

        ensure_dir_exists(os.path.split(intermediate_panel_path)[0])
        ensure_dir_exists(os.path.split(libor_file_path)[0])

        validate_url_config(config)
        validate_url_has_value(config)
        validate_libor_rates_config(config)
        validate_libor_rates_has_values(config)

        # 'timeout' is optional; use the idiomatic `key in dict` membership
        # test instead of `key in dict.keys()`.
        timeout = int(config['timeout']) if 'timeout' in config else None
        tries = int(config.get('tries', 1))

        scraper = BCRALiborScraper(
            url=config.get('url'),
            timeout=timeout,
            tries=tries,
            rates=config.get('rates'),
            skip_intermediate_panel_data=skip_intermediate_panel_data,
            intermediate_panel_path=intermediate_panel_path,
            skip_clean_last_dates=skip_clean_last_dates
        )

        parsed = scraper.run(start_date, end_date)

        processed_header = scraper.preprocess_header(scraper.rates)
        write_file(processed_header, parsed, libor_file_path)

        execution_end_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        Email().send_validation_group_email(execution_start_time, execution_end_time, start_date, end_date, skip_intermediate_panel_data, identifier='libor')

    except InvalidConfigurationError as err:
        click.echo(err)
    def test_run_not_using_intermediate_panel(self):
        """run() returns the parsed rows untouched when the panel is off."""
        day = datetime(2019, 4, 24)

        url = '''
         http://www.bcra.gov.ar/PublicacionesEstadisticas/Tipo_de_cambio_sml.asp
        '''

        # Same mapping as before, built from the list of tenors.
        rates = {str(t): 'libor_{}_dias'.format(t)
                 for t in (30, 60, 90, 180, 360)}

        parsed = [{
            'indice_tiempo': '2019-04-24',
            '30': '0.0248338',
            '60': '0.0254163',
            '90': '0.0258638',
            '180': '0.0261975',
            '360': '0.0272513'
        }]

        # Stub out every network/IO step so run() just pipes `parsed` through.
        with patch.object(BCRALiborScraper, 'fetch_contents',
                          return_value=''), \
             patch.object(BCRALiborScraper, 'parse_contents',
                          return_value=parsed), \
             patch.object(BCRALiborScraper, 'preprocess_rows',
                          return_value=parsed), \
             patch.object(BCRALiborScraper, 'save_intermediate_panel',
                          return_value=''):
            scraper = BCRALiborScraper(
                url,
                rates,
                intermediate_panel_path=None,
                use_intermediate_panel=False)

            assert scraper.run(day, day) == parsed
    def test_run_using_intermediate_panel(self):
        """run() returns rows read from the intermediate panel when enabled."""
        day = datetime(2019, 4, 24)

        url = '''
         http://www.bcra.gov.ar/PublicacionesEstadisticas/Tipo_de_cambio_sml.asp
        '''

        # Same mapping as before, built from the list of tenors.
        rates = {str(t): 'libor_{}_dias'.format(t)
                 for t in (30, 60, 90, 180, 360)}

        panel_rows = [
            {
                'indice_tiempo': '2019-04-24',
                'libor_30_dias': Decimal('0.0248588'),
                'libor_60_dias': Decimal('0.0253013'),
                'libor_90_dias': Decimal('0.025790'),
                'libor_180_dias': Decimal('0.026120'),
                'libor_360_dias': Decimal('0.027130')
            },
        ]

        # Stub the fetch and the panel read so run() returns `panel_rows`.
        with patch.object(BCRALiborScraper, 'fetch_contents',
                          return_value=''), \
             patch.object(BCRALiborScraper, 'parse_from_intermediate_panel',
                          return_value=panel_rows):
            scraper = BCRALiborScraper(url,
                                       rates,
                                       intermediate_panel_path=None,
                                       use_intermediate_panel=True)

            assert scraper.run(day, day) == panel_rows
# Example #5
# 0
def libor(ctx, start_date, end_date, refetch_start_date, refetch_end_date,
          config, skip_intermediate_panel_data, libor_csv_path,
          intermediate_panel_path, skip_clean_last_dates, *args, **kwargs):
    """Scrape BCRA Libor rates for [start_date, end_date], with optional refetch.

    Validates dates and config, optionally builds a refetch date range, runs
    BCRALiborScraper, writes the rows to CSV, logs the execution window, and
    sends a validation email including total wall-clock time. Configuration
    problems are reported via click.echo instead of raising.
    """
    try:
        execution_start_hour = time.time()

        validate_dates(start_date, end_date)
        # Inputs are datetimes; the scraper works with plain dates.
        start_date = start_date.date()
        end_date = end_date.date()
        refetch_dates_range = []
        # Refetch happens only when BOTH bounds are given; a single bound is
        # ignored with a warning.
        if refetch_start_date and refetch_end_date:
            validate_refetch_dates(start_date, end_date,
                                   refetch_start_date.date(),
                                   refetch_end_date.date())
            refetch_dates_range = generate_dates_range(
                refetch_start_date.date(), refetch_end_date.date())
        elif refetch_start_date or refetch_end_date:
            logging.warning(
                'No se encontró fecha para refetch_start_date o refetch_end_date, no se hará refetch.'
            )
        execution_start_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

        logging.basicConfig(format='%(message)s', level=logging.INFO)
        logging.info(Figlet(font='standard').renderText('scraper libor'))
        logging.info(f"Inicio de tiempo de ejecución: {execution_start_time}")

        config = read_config(file_path=config, command=ctx.command.name)
        libor_file_path = validate_file_path(libor_csv_path,
                                             config,
                                             file_path_key='libor_file_path')
        intermediate_panel_path = validate_file_path(
            intermediate_panel_path,
            config,
            file_path_key='intermediate_panel_path')

        # Both output paths must point at files, not directories.
        if os.path.isdir(libor_file_path):
            click.echo(
                'Error: el path ingresado para tasas libor es un directorio')
            exit()
        elif os.path.isdir(intermediate_panel_path):
            click.echo(
                'Error: el path ingresado para el panel intermedio es un directorio'
            )
            exit()

        ensure_dir_exists(os.path.split(intermediate_panel_path)[0])
        ensure_dir_exists(os.path.split(libor_file_path)[0])

        validate_url_config(config)
        validate_url_has_value(config)
        validate_libor_rates_config(config)
        validate_libor_rates_has_values(config)

        # 'timeout' is optional; use the idiomatic `key in dict` membership
        # test instead of `key in dict.keys()`.
        timeout = int(config['timeout']) if 'timeout' in config else None
        tries = int(config.get('tries', 1))

        scraper = BCRALiborScraper(
            url=config.get('url'),
            timeout=timeout,
            tries=tries,
            rates=config.get('rates'),
            skip_intermediate_panel_data=skip_intermediate_panel_data,
            intermediate_panel_path=intermediate_panel_path,
            skip_clean_last_dates=skip_clean_last_dates)

        parsed = scraper.run(start_date, end_date, refetch_dates_range)

        processed_header = scraper.preprocess_header(scraper.rates)

        # `parsed` exposes .values() — presumably a dict keyed by date;
        # only the row values are written. Confirm against scraper.run().
        write_file(processed_header, parsed.values(), libor_file_path)

        execution_end_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

        logging.info(f"Fin de tiempo de ejecución: {execution_end_time}")

        # Format elapsed wall time as HH:MM:SS.ss for the report email.
        execution_end_hour = time.time()
        hours, rem = divmod(execution_end_hour - execution_start_hour, 3600)
        minutes, seconds = divmod(rem, 60)
        execution_total_time = "{:0>2}:{:0>2}:{:05.2f}".format(
            int(hours), int(minutes), seconds)
        Email().send_validation_group_email(execution_start_time,
                                            execution_end_time,
                                            execution_total_time,
                                            start_date,
                                            end_date,
                                            skip_intermediate_panel_data,
                                            identifier='libor')

    except InvalidConfigurationError as err:
        click.echo(err)