Example #1
def sync(ctx: typer.Context,
         project: str = typer.Argument(
             ..., help='The name for the project, specified in config file'),
         since: datetime = typer.Option(..., formats=['%Y-%m-%d']),
         until: datetime = typer.Option(..., formats=['%Y-%m-%d']),
         dry: bool = typer.Option(
             False,
             help='Use log entries instead of uploading them to redmine'),
         drain: bool = typer.Option(
             False,
             help='Use drain issues for entries without specified dest')):
    # Load the project configuration and prepare the HTTP session on the context.
    config = setup_config(ctx, ctx.meta['config_path'])
    setup_http(ctx)

    # Resolve the current Redmine user; its id is needed when loading time entries.
    ctx.meta['rdm_user'] = extract.get_redmine_user(config["redmine"]["url"])

    # Extract Toggl time entries and Redmine issues for the project and period.
    time_entries = get_toggl_enteries(config, project, since, until)

    issues = get_redmine_issues(config, project, since)

    # Split entries into those that reference a known Redmine issue and the rest.
    issue_ids = petl.columns(issues)['id']
    entries_to_load, unset_entries = petl.biselect(
        time_entries, lambda row: row['issue_id'] in issue_ids)

    if drain and petl.nrows(unset_entries):
        log.info('Using drain')

        # Route entries without a destination issue to the project's drain issues.
        drained, unset_entries = drained_entries(ctx, issues, unset_entries,
                                                 project)

        log.info(f'Drained {petl.nrows(drained)} issues')

        entries_to_load = petl.cat(entries_to_load, drained)

    if petl.nrows(unset_entries):
        log.warning(f"There're {petl.nrows(unset_entries)} unset entries")

    # Optionally collapse entries to one per day and description before loading.
    if get_proj_attr(config, project, 'group_entries'):
        log.info('Using group by day and description')

        entries_to_load = transform.group_entries_by_day(entries_to_load)

    # Load the transformed entries into Redmine (or only log them when dry).
    load.to_redmine_time(config["redmine"]["url"],
                         entries_to_load,
                         activity_id=get_proj_attr(config, project,
                                                   'rdm_activity_id'),
                         user_id=ctx.meta['rdm_user'].get('id'),
                         dry=dry)
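For context, sync is written as a Typer command, but the original project's CLI wiring is not shown here. The snippet below is a minimal sketch of how such a command could be registered; the app object and the callback that fills ctx.meta['config_path'] are assumptions, not part of the original source.

import typer

app = typer.Typer()

@app.callback()
def main(ctx: typer.Context,
         config_path: str = typer.Option('config.toml')):
    # Hypothetical root callback: stores the config path where sync() expects it.
    ctx.meta['config_path'] = config_path

# Register the sync command defined above.
app.command()(sync)

if __name__ == '__main__':
    app()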
Example #2
File: sql.py  Project: ianfiske/petlx
def make_sqlalchemy_table(table, tablename, schema=None, constraints=True, metadata=None):
    """
    Create an SQLAlchemy table based on a :mod:`petl` table.

    Parameters
    ----------

    table : sequence of sequences (petl table)
        Table data to use to infer types etc.
    tablename : string
        Name of the table
    schema : string
        Name of the database schema to create the table in
    constraints : bool
        If True, use length and nullable constraints
    metadata : sqlalchemy.MetaData
        Custom table metadata

    """

    try:
        import sqlalchemy
    except ImportError as e:
        raise UnsatisfiedDependency(e, dep_message)

    if not metadata:
        metadata = sqlalchemy.MetaData()

    sql_table = sqlalchemy.Table(tablename, metadata, schema=schema)

    fields = header(table)
    cols = columns(table)

    for f in fields:
        sql_column = make_sqlalchemy_column(cols[f], f, constraints=constraints)
        sql_table.append_column(sql_column)

    return sql_table
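A minimal usage sketch for the function above, assuming SQLAlchemy is installed and that make_sqlalchemy_column from the same module is available; the sample table and the in-memory SQLite engine are illustrative only, not taken from the original project.

import sqlalchemy

# Hypothetical input: a small petl-style table (header row followed by data rows).
tbl = [['id', 'name'], [1, 'a'], [2, 'b']]

sql_table = make_sqlalchemy_table(tbl, 'example_table')

# Create the inferred table in an in-memory SQLite database.
engine = sqlalchemy.create_engine('sqlite://')
sql_table.create(engine)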
Example #3
File: test_util.py  Project: obsoleter/petl
def test_columns():
    
    table = [['foo', 'bar'], ['a', 1], ['b', 2], ['b', 3]]
    cols = columns(table)
    eq_(['a', 'b', 'b'], cols['foo'])
    eq_([1, 2, 3], cols['bar'])
Example #4
File: test_util.py  Project: greeness/petl
def test_columns():

    table = [['foo', 'bar'], ['a', 1], ['b', 2], ['b', 3]]
    cols = columns(table)
    assertequal(['a', 'b', 'b'], cols['foo'])
    assertequal([1, 2, 3], cols['bar'])
Example #5
File: test_util.py  Project: talwai/petl
def test_columns():

    table = [["foo", "bar"], ["a", 1], ["b", 2], ["b", 3]]
    cols = columns(table)
    eq_(["a", "b", "b"], cols["foo"])
    eq_([1, 2, 3], cols["bar"])
Example #6
File: materialise.py  Project: DeanWay/petl
from __future__ import division, print_function, absolute_import


# columns()
###########

import petl as etl
table = [['foo', 'bar'], ['a', 1], ['b', 2], ['b', 3]]
cols = etl.columns(table)
cols['foo']
cols['bar']


# facetcolumns()
################

import petl as etl
table = [['foo', 'bar', 'baz'],
         ['a', 1, True],
         ['b', 2, True],
         ['b', 3]]
fc = etl.facetcolumns(table, 'foo')
fc['a']
fc['b']
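For reference, a sketch of the values these calls produce, based on petl's documented behaviour: columns() maps each field name to the list of values in that column, and facetcolumns() additionally groups rows by the value of the key field, padding short rows with None. The expected values in the comments below are illustrative, not captured output.

import petl as etl

table = [['foo', 'bar', 'baz'],
         ['a', 1, True],
         ['b', 2, True],
         ['b', 3]]

cols = etl.columns(table)
cols['foo']          # ['a', 'b', 'b']
cols['bar']          # [1, 2, 3]

fc = etl.facetcolumns(table, 'foo')
fc['a']['bar']       # [1]
fc['b']['bar']       # [2, 3]
fc['b']['baz']       # [True, None] -- the short last row is padded with None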