Example #1
def test_load_erroneous_csv():
    err_csv = (b"id,name,category_id/id\n"
               b"__test__.partner_fail,Test, xmlid_not_found\n")
    csv_stream = BytesIO()
    csv_stream.write(err_csv)
    csv_stream.seek(0)
    with anthem.cli.Context(None, anthem.cli.Options(test_mode=True)) as ctx:
        with pytest.raises(AnthemError):
            load_csv_stream(ctx, 'res.partner', csv_stream, delimiter=',')
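For context, a minimal sketch of the imports and fixture data these test snippets rely on; the module paths and the csv_partner content are assumptions based on the anthem project layout and are not shown in this listing.

# Assumed imports/fixture for the test snippets; module paths are assumptions.
from io import BytesIO, StringIO

import pytest

import anthem.cli
from anthem.exceptions import AnthemError  # assumed location of AnthemError
from anthem.lyrics.loaders import load_csv_stream

# Hypothetical fixture data used by the happy-path tests below.
csv_partner = (b"id,name\n"
               b"__test__.partner1,Partner 1\n"
               b"__test__.partner2,Partner 2\n")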
Example #2
def load_csv(ctx, path, model, delimiter=",", header=None, header_exclude=None):
    content = resource_stream(req, path)
    load_csv_stream(
        ctx,
        model,
        content,
        delimiter=delimiter,
        header=header,
        header_exclude=header_exclude,
    )
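Here req is a module-level object that is not part of the snippet; a plausible setup, assuming the loader resolves path inside the project's installed distribution via pkg_resources (the distribution name is an assumption):

# Hypothetical module-level setup for `req` used by load_csv above;
# the distribution name is an assumption.
from pkg_resources import Requirement, resource_stream

req = Requirement.parse('my-odoo-project')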
Example #3
def test_load_csv_stream_model():
    csv_stream = BytesIO()
    csv_stream.write(csv_partner)
    csv_stream.seek(0)
    with anthem.cli.Context(None, anthem.cli.Options(test_mode=True)) as ctx:
        load_csv_stream(ctx, ctx.env['res.partner'], csv_stream, delimiter=',')
        partner1 = ctx.env.ref('__test__.partner1', raise_if_not_found=False)
        assert partner1
        assert partner1.name == 'Partner 1'
        partner2 = ctx.env.ref('__test__.partner2', raise_if_not_found=False)
        assert partner2
        assert partner2.name == 'Partner 2'
Example #4
def test_load_csv_stream_model_string():
    """ Pass string instead of model to load_csv_stream """
    csv_stream = StringIO()
    csv_stream.write(csv_partner)
    csv_stream.seek(0)
    with anthem.cli.Context(None, anthem.cli.Options(test_mode=True)) as ctx:
        load_csv_stream(ctx, 'res.partner', csv_stream, delimiter=',')
        partner1 = ctx.env.ref('__test__.partner1', raise_if_not_found=False)
        assert partner1
        assert partner1.name == 'Partner 1'
        partner2 = ctx.env.ref('__test__.partner2', raise_if_not_found=False)
        assert partner2
        assert partner2.name == 'Partner 2'
Example #5
def load_csv(ctx,
             path,
             model,
             delimiter=',',
             header=None,
             header_exclude=None):
    ctx.log_line(
        '[DEPRECATED] use `from anthem.lyrics.loaders import load_csv`. '
        '\nUpgrade `anthem` to version > 0.11.0. '
        '\nUpgrade `docker-odoo-project` image > 2.5.1 '
        'or set `ODOO_DATA_PATH=/odoo/data` in Dockerfile.')
    content = resource_stream(req, path)
    load_csv_stream(ctx,
                    model,
                    content,
                    delimiter=delimiter,
                    header=header,
                    header_exclude=header_exclude)
Example #6
def load_csv_parallel(ctx,
                      model,
                      csv_path,
                      defer_parent_computation=True,
                      delimiter=','):
    """Use me to load an heavy file ~2k of lines or more.

    Then calling this method as a parameter of importer.sh

    importer.sh will split the file in chunks per number of processor
    and per 500.
    This method will be called once per chunk in order to do the csv loading
    on multiple processes.

    Usage::

        @anthem.log
        def setup_locations(ctx):
            load_csv_parallel(
                ctx,
                'stock.location',
                'data/install/stock.location.csv',
                defer_parent_computation=True)

    Then in `migration.yml`::

        - importer.sh songs.install.inventory::setup_locations /odoo/data/install/stock.location.csv
        # if defer_parent_computation=True
        - anthem songs.install.inventory::location_compute_parents

    """ # noqa
    load_ctx = ctx.env.context.copy()
    if defer_parent_computation:
        load_ctx.update({'defer_parent_store_computation': 'manually'})
    if isinstance(model, str):
        model = ctx.env[model]
    model = model.with_context(**load_ctx)
    for content in get_files(csv_path):
        load_csv_stream(ctx, model, content, delimiter=delimiter)
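The docstring above refers to a follow-up song to run when defer_parent_computation=True; a hedged sketch of what it could look like, assuming a standard Odoo parent_store model (the song name mirrors the docstring, the ORM call is an assumption about the target Odoo version):

# Hypothetical companion song for the deferred parent computation;
# _parent_store_compute() assumes an Odoo model using parent_store.
@anthem.log
def location_compute_parents(ctx):
    ctx.env['stock.location']._parent_store_compute()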
Example #7
def load_csv(ctx, path, model, delimiter=','):
    content = resource_stream(req, path)
    load_csv_stream(ctx, model, content, delimiter=delimiter)
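A usage sketch showing how a loader like this is typically invoked from an anthem song, following the pattern documented in the load_csv_parallel docstring above (the song name and CSV path are assumptions):

# Hypothetical song calling the loader above; path and name are assumptions.
@anthem.log
def setup_partners(ctx):
    load_csv(ctx, 'data/install/res.partner.csv', 'res.partner')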